# TODO please include pacakges which you did uss (do it at the end)
#knitr::opts_chunk$set(echo = TRUE)
pacman::p_load(tidyverse,
here,
metafor,
emmeans,
orchaRd,
MCMCglmm,
corpcor,
clubSandwich,
MuMIn,
patchwork,
naniar,
GoodmanKruskal,
networkD3,
ggplot2,
ggalluvial,
ggthemr,
cowplot)
# needed for model selection using MuMin within metafor
eval(metafor:::.MuMIn)
dat <- read_csv(here("Data","Full_data.csv"))
# Load custom function to extract data
#source(here("R/functions.R"))
source(here("R/functions_cleanedup.R"))
Removing study with negative values, getting effect sizes from function, ‘flipping’ effect sizes so that all effect sizes are higher values = individuals do better and learning/memory, shifting negative values to positive as lnRR cannot use negative values, assigining human readable terms, and creating VCV of variance
#removing study with negative values as these are unable to be used for lnRR
dat <- droplevels(dat[!dat$First_author == 'Wang',])
#Getting effect sizes
effect_size <- effect_set(CC_n = "CC_n", CC_mean = "CC_mean", CC_SD = "CC_SD",
EC_n = "EC_n", EC_mean = "EC_mean" , EC_SD ="EC_SD",
CS_n = "CS_n", CS_mean = "CS_mean", CS_SD = "CS_SD",
ES_n = "ES_n", ES_mean = "ES_mean", ES_SD = "ES_SD",
data = dat)
#'pure' effect sizes
effect_size2 <- effect_set2(CC_n = "CC_n", CC_mean = "CC_mean", CC_SD = "CC_SD",
EC_n = "EC_n", EC_mean = "EC_mean" , EC_SD ="EC_SD",
CS_n = "CS_n", CS_mean = "CS_mean", CS_SD = "CS_SD",
ES_n = "ES_n", ES_mean = "ES_mean", ES_SD = "ES_SD",
data = dat)
#Removing missing effect sizes
dim(dat)
full_info <- which(complete.cases(effect_size) == TRUE)
dat_effect <- cbind(dat, effect_size, effect_size2)
dat <- dat_effect[full_info, ]
names(dat_effect)
dat <- dat_effect[full_info, ]
dim(dat_effect)
dimentions <- dim(dat)
#Flipping 'lower is better' effect sizes
#flipping lnRR for values where higher = worse
dat$lnRR_Ea <- ifelse(dat$Response_direction == 2, dat$lnRR_E*-1,ifelse(is.na(dat$Response_direction) == TRUE, NA, dat$lnRR_E)) # currently NAswhich causes error
dat$lnRR_Sa <- ifelse(dat$Response_direction == 2, dat$lnRR_S*-1,ifelse(is.na(dat$Response_direction) == TRUE, NA, dat$lnRR_S)) # currently NAswhich causes error
dat$lnRR_ESa <- ifelse(dat$Response_direction == 2, dat$lnRR_ES*-1,ifelse(is.na(dat$Response_direction) == TRUE, NA, dat$lnRR_ES)) # currently NAswhich causes error
#flipping 'pure effect sizes'
dat$lnRR_E2a <- ifelse(dat$Response_direction == 2, dat$lnRR_E2*-1,ifelse(is.na(dat$Response_direction) == TRUE, NA, dat$lnRR_E2)) # currently NAswhich causes error
dat$lnRR_S2a <- ifelse(dat$Response_direction == 2, dat$lnRR_S2*-1,ifelse(is.na(dat$Response_direction) == TRUE, NA, dat$lnRR_S2)) # currently NAswhich causes error
dat$lnRR_ES2a <- ifelse(dat$Response_direction == 2, dat$lnRR_ES2*-1,ifelse(is.na(dat$Response_direction) == TRUE, NA, dat$lnRR_ES2)) # currently NAswhich causes error
dat$lnRR_E3a <- ifelse(dat$Response_direction == 2, dat$lnRR_E3*-1,ifelse(is.na(dat$Response_direction) == TRUE, NA, dat$lnRR_E3)) # currently NAswhich causes error
dat$lnRR_S3a <- ifelse(dat$Response_direction == 2, dat$lnRR_S3*-1,ifelse(is.na(dat$Response_direction) == TRUE, NA, dat$lnRR_S3)) # currently NAswhich causes error
#flipping SMD
dat$SMD_Ea <- ifelse(dat$Response_direction == 2, dat$SMD_E*-1,ifelse(is.na(dat$Response_direction) == TRUE, NA, dat$SMD_E)) # currently NAswhich causes error
dat$SMD_Sa <- ifelse(dat$Response_direction == 2, dat$SMD_S*-1,ifelse(is.na(dat$Response_direction) == TRUE, NA, dat$SMD_S)) # currently NAswhich causes error
dat$SMD_ESa <- ifelse(dat$Response_direction == 2, dat$SMD_ES*-1,ifelse(is.na(dat$Response_direction) == TRUE, NA, dat$SMD_ES))
#rounding down integer sample sizes
dat$CC_n <- floor(dat$CC_n)
dat$EC_n <- floor(dat$EC_n)
dat$CS_n <- floor(dat$CS_n)
dat$ES_n <- floor(dat$CS_n)
#assigning human readable terms
dat <- dat %>% mutate(Type_learning = case_when(Type_learning == 1 ~ "Habituation",
Type_learning == 2 ~ "Conditioning",
Type_learning == 3 ~ "Recognition",
Type_learning == 4 ~ "Unclear"),
Learning_vs_memory = case_when(Learning_vs_memory == 1 ~ "Learning",
Learning_vs_memory == 2 ~ "Memory",
Learning_vs_memory == 1 ~ "Unclear"),
Type_reinforcement = case_when(Type_reinforcement== 1 ~"Appetitive",
Type_reinforcement== 2 ~ "Aversive",
Type_reinforcement== 3 ~ "Not applicable",
Type_reinforcement== 4 ~ "Unclear"),
Type_stress_exposure = case_when(Type_stress_exposure == 1 ~ "Density",
Type_stress_exposure == 2 ~ "Scent",
Type_stress_exposure == 3 ~ "Shock",
Type_stress_exposure == 4 ~ "Exertion",
Type_stress_exposure == 5 ~ "Restraint",
Type_stress_exposure == 6 ~ "MS",
Type_stress_exposure == 7 ~ "Circadian rhythm",
Type_stress_exposure == 8 ~ "Noise",
Type_stress_exposure == 9 ~ "Other",
Type_stress_exposure == 10 ~ "Combination",
Type_stress_exposure == 11 ~ "unclear"),
Age_stress_exposure = case_when(Age_stress_exposure == 1 ~ "Prenatal",
Age_stress_exposure == 2 ~ "Early postnatal",
Age_stress_exposure == 3 ~ "Adolescent",
Age_stress_exposure == 4 ~ "Adult",
Age_stress_exposure == 5 ~ "Unclear"),
Stress_duration = case_when(Stress_duration == 1 ~ "Acute",
Stress_duration == 2 ~ "Chronic",
Stress_duration == 3 ~ "Intermittent",
Stress_duration == 4 ~ "Unclear"),
EE_social = case_when(EE_social == 1 ~ "Social",
EE_social== 2 ~ "Non-social",
EE_social == 3 ~ "Unclear"),
EE_exercise = case_when(EE_exercise == 1 ~ "Exercise",
EE_exercise == 2 ~ "No exercise"),
Age_EE_exposure = case_when(Age_EE_exposure == 1 ~ "Prenatal",
Age_EE_exposure == 2 ~ "Early postnatal",
Age_EE_exposure == 3 ~ "Adolescent",
Age_EE_exposure == 4 ~ "Adult",
Age_EE_exposure == 5 ~ "Unclear"),
Exposure_order = case_when(Exposure_order == 1 ~ "Stress first",
Exposure_order == 2 ~ "Enrichment first",
Exposure_order == 3 ~ "Concurrently",
Exposure_order == 4 ~ "Unclear"),
Age_assay = case_when(Age_assay == 1 ~ "Early postnatal",
Age_assay == 2 ~ "Adolescent",
Age_assay == 3 ~ "Adult",
Age_assay == 4 ~ "Unclear"),
Sex = case_when(Sex == 1 ~ "Female",
Sex == 2 ~ "Male",
Sex == 3 ~ "Mixed",
Sex == 4 ~ "Unclear"))
#making variance VCVs
VCV_E <- impute_covariance_matrix(vi = dat$lnRRV_E, cluster = dat$Study_ID, r = 0.5)
VCV_S <- impute_covariance_matrix(vi = dat$lnRRV_S, cluster = dat$Study_ID, r = 0.5)
VCV_ES <- impute_covariance_matrix(vi = dat$lnRRV_ES, cluster = dat$Study_ID, r = 0.5)
VCV_Ea <- impute_covariance_matrix(vi = dat$SMDV_E, cluster = dat$Study_ID, r = 0.5)
VCV_Sa <- impute_covariance_matrix(vi = dat$SMDV_S, cluster = dat$Study_ID, r = 0.5)
VCV_ESa <- impute_covariance_matrix(vi = dat$SMDV_ES, cluster = dat$Study_ID, r = 0.5)
#Number of effect sizes
length(unique(dat$ES_ID))
## [1] 92
#Number of studies
length(unique(dat$Study_ID))
## [1] 30
#Publication years
min(dat$Year_published)
## [1] 2006
max(dat$Year_published)
## [1] 2021
#TODO please adjust this fig siz and also check - other bigs too see all fig sizes look fine in HTML
#see columns with missing values
vis_miss(dat) +
theme(plot.title = element_text(hjust = 0.5, vjust = 3),
plot.margin = margin(t = 0.5, r = 3, b = 1, l = 1, unit = "cm")) +
ggtitle("Missing data in the selected predictors") #no mising values
#useGoodman and Kruskal’s τ measure of association between categorical predictor variables (function from package GoodmanKruskal: https://cran.r-project.org/web/packages/GoodmanKruskal/vignettes/GoodmanKruskal.html)
#GKmatrix <- GKtauDataframe(subset(dat, select = c("Sex", "Type_learning", "Learning_vs_memory", #"Type_reinforcement", "Type_stress_exposure", "Age_stress_exposure", "Stress_duration", #"EE_social_HR", "EE_exercise", "Age_EE_exposure", "Exposure_order", "Age_assay")))
#plot(GKmatrix)
#simple pairwise contingency tables
table(dat$Type_learning, dat$Type_reinforcement)
table(dat$Age_stress_exposure, dat$Age_EE_exposure)
table(dat$Type_stress_exposure, dat$Age_stress_exposure)
table(dat$Type_stress_exposure, dat$Age_assay)
table(dat$Type_stress_exposure, dat$Stress_duration)
# create a frequency table for Species and Strain variables
freq_1 <- as.data.frame(table(dat$Common_species, dat$Strain)) %>% rename(Species = Var1, Strain = Var2)
is_alluvia_form(as.data.frame(freq_1), axes = 1:2, silent = TRUE)
ggplot(data = freq_1,
aes(axis1 = Species, axis2 = Strain, y = Freq)) +
geom_alluvium(aes(fill = Strain)) +
geom_stratum(aes(fill = Species)) +
geom_text(stat = "stratum", aes(label = after_stat(stratum))) +
#theme_minimal() +
theme_void() +
theme(legend.position = "none",
plot.title = element_text(hjust = 0.5, vjust = 3),
axis.title.x = element_text(),
axis.text.x = element_text(face="bold")) +
scale_x_discrete(limits = c("Species", "Strain"), expand = c(0.15, 0.05), position = "top") +
ggtitle("Species and strains used (set1)")
# create frequency table for Sex, Species and Strain
freq_2 <- as.data.frame(table(dat$Sex, dat$Common_species, dat$Strain)) %>% rename(Sex = Var1, Species = Var2, Strain = Var3)
is_alluvia_form(as.data.frame(freq_2), axes = 1:3, silent = TRUE)
ggplot(data = freq_2,
aes(axis1 = Sex, axis2 = Species, axis3 = Strain, y = Freq)) +
geom_alluvium(aes(fill = Sex)) +
geom_flow() +
geom_stratum(aes(fill = Sex)) +
geom_text(stat = "stratum", aes(label = after_stat(stratum))) +
#theme_minimal() +
theme_void() +
theme(legend.position = "none",
plot.title = element_text(hjust = 0.5, vjust = 3),
axis.title.x = element_text(),
axis.text.x = element_text(face="bold")) +
scale_x_discrete(limits = c("Sex", "Species", "Strain"), expand = c(0.15, 0.05), position = "top") +
ggtitle("Species, strains and sex used (set2)")
freq_2 %>% filter(Freq != 0) %>% arrange(desc(Freq)) #collapesd table of values, without 0s
freq_ages1 <- as.data.frame(table(dat$Age_stress_exposure, dat$Age_EE_exposure)) %>% rename(Age_stress = Var1, Age_EE = Var2)
is_alluvia_form(as.data.frame(freq_ages1), axes = 1:2, silent = TRUE)
ggplot(data = freq_ages1,
aes(axis1 = Age_stress, axis2 = Age_EE, y = Freq)) +
geom_alluvium(aes(fill = Age_EE)) +
geom_stratum(aes(fill = Age_stress)) +
geom_text(stat = "stratum", aes(label = after_stat(stratum))) +
#theme_minimal() +
theme_void() +
theme(legend.position = "none",
plot.title = element_text(hjust = 0.5, vjust = 3),
axis.title.x = element_text(),
axis.text.x = element_text(face="bold")) +
scale_x_discrete(limits = c("Stress", "Enrichment"), expand = c(0.15, 0.05), position = "top") +
ggtitle("Life stages used (set1)")
freq_ages2 <- as.data.frame(table(dat$Age_stress_exposure, dat$Age_EE_exposure, dat$Age_assay)) %>% rename(Age_stress = Var1, Age_EE = Var2, Age_assay = Var3)
is_alluvia_form(as.data.frame(freq_ages2), axes = 1:3, silent = TRUE)
ggplot(data = freq_ages2,
aes(axis1 = Age_stress, axis2 = Age_EE, axis3 = Age_assay, y = Freq)) +
geom_alluvium(aes(fill = Age_stress)) +
geom_flow() +
geom_stratum(aes(fill = Age_stress)) +
geom_text(stat = "stratum", aes(label = after_stat(stratum))) +
#theme_minimal() +
theme_void() +
theme(legend.position = "none",
plot.title = element_text(hjust = 0.5, vjust = 3),
axis.title.x = element_text(),
axis.text.x = element_text(face="bold")) +
scale_x_discrete(limits = c("Stress", "Enrichment", "Assay"), expand = c(0.15, 0.05), position = "top") +
ggtitle("Life stages used (set2)")
freq_ages2 %>% filter(Freq != 0) %>% arrange(desc(Freq)) #collapesd table of values, without 0s
freq_ages3 <- as.data.frame(table(dat$Age_stress_exposure, dat$Age_EE_exposure, dat$Age_assay, dat$Exposure_order)) %>% rename(Age_stress = Var1, Age_EE = Var2, Age_assay = Var3, Order = Var4)
is_alluvia_form(as.data.frame(freq_ages3), axes = 1:4, silent = TRUE)
ggplot(data = freq_ages3,
aes(axis1 = Age_stress, axis2 = Age_EE, axis3 = Age_assay, axis4 = Order, y = Freq)) +
geom_alluvium(aes(fill = Age_stress)) +
geom_flow() +
geom_stratum(aes(fill = Age_stress)) +
geom_text(stat = "stratum", aes(label = after_stat(stratum))) +
#theme_minimal() +
theme_void() +
theme(legend.position = "none",
plot.title = element_text(hjust = 0.5, vjust = 3),
axis.title.x = element_text(),
axis.text.x = element_text(face="bold")) +
scale_x_discrete(limits = c("Stress", "Enrichment", "Assay", "Order"), expand = c(0.15, 0.05), position = "top") +
ggtitle("Life stages used and order (set3)")
freq_ages2 %>% filter(Freq != 0) %>% arrange(desc(Freq)) #collapesd table of values, without 0s
freq_ages3 %>% filter(Freq != 0) %>% arrange(desc(Freq)) #collapesd table of values, without 0s
freq_stress1 <- as.data.frame(table(dat$Stress_duration, dat$Type_stress_exposure)) %>% rename(Stress_duration = Var1, Stress_type = Var2)
is_alluvia_form(as.data.frame(freq_stress1), axes = 1:2, silent = TRUE)
ggplot(data = freq_stress1,
aes(axis1 = Stress_duration, axis2 = Stress_type, y = Freq)) +
geom_alluvium(aes(fill = Stress_duration)) +
geom_stratum(aes(fill = Stress_duration)) +
geom_text(stat = "stratum", aes(label = after_stat(stratum))) +
#theme_minimal() +
theme_void() +
theme(legend.position = "none",
plot.title = element_text(hjust = 0.5, vjust = 3),
axis.title.x = element_text(),
axis.text.x = element_text(face="bold")) +
scale_x_discrete(limits = c("Duration", "Type"), expand = c(0.15, 0.05), position = "top") +
ggtitle("Stress exposure variables (set1)")
freq_stress2 <- as.data.frame(table(dat$Age_stress_exposure, dat$Stress_duration, dat$Type_stress_exposure)) %>% rename(Age_stress = Var1, Duration_stress = Var2, Type_stress = Var3)
is_alluvia_form(as.data.frame(freq_stress2), axes = 1:3, silent = TRUE)
ggplot(data = freq_stress2,
aes(axis1 = Age_stress, axis2 = Duration_stress, axis3 = Type_stress, y = Freq)) +
geom_alluvium(aes(fill = Age_stress)) +
geom_flow() +
geom_stratum(aes(fill = Age_stress)) +
geom_text(stat = "stratum", aes(label = after_stat(stratum))) +
#theme_minimal() +
theme_void() +
theme(legend.position = "none",
plot.title = element_text(hjust = 0.5, vjust = 3),
axis.title.x = element_text(),
axis.text.x = element_text(face="bold")) +
scale_x_discrete(limits = c("Age", "Duration", "Type"), expand = c(0.1, 0.1), position = "top") +
ggtitle("Stress exposure variables (set2)")
freq_assay1 <- as.data.frame(table(dat$Learning_vs_memory, dat$Type_reinforcement)) %>% rename(Learning_Memory = Var1, Reinforcement = Var2)
is_alluvia_form(as.data.frame(freq_assay1), axes = 1:2, silent = TRUE)
ggplot(data = freq_assay1,
aes(axis1 = Learning_Memory, axis2 = Reinforcement, y = Freq)) +
geom_alluvium(aes(fill = Learning_Memory)) +
geom_stratum(aes(fill = Learning_Memory)) +
geom_text(stat = "stratum", aes(label = after_stat(stratum))) +
#theme_minimal() +
theme_void() +
theme(legend.position = "none",
plot.title = element_text(hjust = 0.5, vjust = 3),
axis.title.x = element_text(),
axis.text.x = element_text(face="bold")) +
scale_x_discrete(limits = c("Learning_Memory", "Reinforcement"), expand = c(0.15, 0.05), position = "top") +
ggtitle("Behavioural assay variables (set1)")
freq_assay2 <- as.data.frame(table(dat$Age_assay, dat$Learning_vs_memory, dat$Type_reinforcement)) %>% rename(Age_assay = Var1, Learning_Memory = Var2, Reinforcement = Var3)
is_alluvia_form(as.data.frame(freq_assay2), axes = 1:3, silent = TRUE)
ggplot(data = freq_assay2,
aes(axis1 = Age_assay, axis2 = Learning_Memory, axis3 = Reinforcement, y = Freq)) +
geom_alluvium(aes(fill = Age_assay)) +
geom_flow() +
geom_stratum(aes(fill = Age_assay)) +
geom_text(stat = "stratum", aes(label = after_stat(stratum))) +
#theme_minimal() +
theme_void() +
theme(legend.position = "none",
plot.title = element_text(hjust = 0.5, vjust = 3),
axis.title.x = element_text(),
axis.text.x = element_text(face="bold")) +
scale_x_discrete(limits = c("Age", "Learning_Memory", "Reinforcement"), expand = c(0.1, 0.1), position = "top") +
ggtitle("Behavioural assay variables (set2)")
freq_assay3 <- as.data.frame(table(dat$Type_learning, dat$Learning_vs_memory, dat$Type_reinforcement)) %>% rename(Type = Var1, Learning_Memory = Var2, Reinforcement = Var3)
is_alluvia_form(as.data.frame(freq_assay3), axes = 1:3, silent = TRUE)
ggplot(data = freq_assay3,
aes(axis1 = Type, axis2 = Learning_Memory, axis3 = Reinforcement, y = Freq)) +
geom_alluvium(aes(fill = Type)) +
geom_flow() +
geom_stratum(aes(fill = Type)) +
geom_text(stat = "stratum", aes(label = after_stat(stratum))) +
#theme_minimal() +
theme_void() +
theme(legend.position = "none",
plot.title = element_text(hjust = 0.5, vjust = 3),
axis.title.x = element_text(),
axis.text.x = element_text(face="bold")) +
scale_x_discrete(limits = c("Type", "Learning_Memory", "Reinforcement"), expand = c(0.1, 0.1), position = "top") +
ggtitle("Behavioural assay variables (set3)")
Learning and memory are significantly improved when housed with environmnetal enrichment
mod_E0 <- rma.mv(yi = lnRR_Ea, V = VCV_E, random = list(~1|Study_ID,
~1|ES_ID,
~1|Strain),
test = "t",
data = dat)
summary(mod_E0)
##
## Multivariate Meta-Analysis Model (k = 92; method: REML)
##
## logLik Deviance AIC BIC AICc
## -20.3125 40.6249 48.6249 58.6683 49.0900
##
## Variance Components:
##
## estim sqrt nlvls fixed factor
## sigma^2.1 0.0044 0.0665 30 no Study_ID
## sigma^2.2 0.0485 0.2203 92 no ES_ID
## sigma^2.3 0.0000 0.0000 6 no Strain
##
## Test for Heterogeneity:
## Q(df = 91) = 897.6814, p-val < .0001
##
## Model Results:
##
## estimate se tval df pval ci.lb ci.ub
## 0.2146 0.0339 6.3344 91 <.0001 0.1473 0.2818 ***
##
## ---
## Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
i2_ml(mod_E0)
## I2_total I2_Study_ID I2_ES_ID I2_Strain
## 9.398214e-01 7.853525e-02 8.612861e-01 7.246533e-10
orchard_plot(mod_E0, mod = "Int", xlab = "lnRR", alpha=0.4) + # Orchard plot
geom_errorbarh(aes(xmin = lowerPR, xmax = upperPR), height = 0, show.legend = FALSE, size = 1.1, alpha = 0.5)+ # prediction intervals
geom_errorbarh(aes(xmin = lowerCL, xmax = upperCL), height = 0.05, show.legend = FALSE, size = 2)+ # confidence intervals
geom_point(aes(fill = name), size = 5, shape = 21)+ # mean estimate
scale_colour_manual(values = "darkorange")+ # change colours
scale_fill_manual(values="darkorange")+
scale_size_continuous(range = c(1, 7))+ # change point scaling
theme(panel.border = element_rect(colour = "black", fill=NA, size=1.3), # border around the plot
text = element_text(size = 24), # change font sizes
legend.title = element_text(size = 15),
legend.text = element_text(size = 13))
The type of learning/memory response
dat1 <- filter(dat, Type_learning %in% c("Recognition", "Habituation", "Conditioning"))
VCV_E1 <- impute_covariance_matrix(vi = dat1$lnRRV_E, cluster = dat$Study_ID, r = 0.5)
mod_E1 <- rma.mv(yi = lnRR_Ea, V = VCV_E1, mod = ~Type_learning-1, random = list(~1|Study_ID,
~1|ES_ID,
~1|Strain),
test = "t",
data = dat1)
summary(mod_E1)
##
## Multivariate Meta-Analysis Model (k = 92; method: REML)
##
## logLik Deviance AIC BIC AICc
## -18.0617 36.1234 48.1234 63.0553 49.1478
##
## Variance Components:
##
## estim sqrt nlvls fixed factor
## sigma^2.1 0.0081 0.0898 30 no Study_ID
## sigma^2.2 0.0434 0.2083 92 no ES_ID
## sigma^2.3 0.0000 0.0000 6 no Strain
##
## Test for Residual Heterogeneity:
## QE(df = 89) = 790.2293, p-val < .0001
##
## Test of Moderators (coefficients 1:3):
## F(df1 = 3, df2 = 89) = 14.6747, p-val < .0001
##
## Model Results:
##
## estimate se tval df pval ci.lb
## Type_learningConditioning 0.2514 0.0383 6.5700 89 <.0001 0.1754
## Type_learningHabituation 0.2267 0.1216 1.8639 89 0.0656 -0.0150
## Type_learningRecognition 0.0635 0.0706 0.8997 89 0.3707 -0.0768
## ci.ub
## Type_learningConditioning 0.3275 ***
## Type_learningHabituation 0.4684 .
## Type_learningRecognition 0.2039
##
## ---
## Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
r2_ml(mod_E1)
## R2_marginal R2_coditional
## 0.08104773 1.00000000
Learning_E <- orchard_plot(mod_E1, mod = "Type_learning", xlab = "lnRR", alpha=0.4) +
geom_errorbarh(aes(xmin = lowerPR, xmax = upperPR), height = 0, show.legend = FALSE, size = 1.1, alpha = 0.5) + # prediction intervals
geom_errorbarh(aes(xmin = lowerCL, xmax = upperCL), height = 0.05, show.legend = FALSE, size = 2) + # confidence intervals
xlim(-0.5, 2) +
geom_point(aes(fill = name), size = 5, shape = 21)+ # mean estimate
scale_size_continuous(range = c(1, 7))+ # change point scaling
theme(panel.border = element_rect(colour = "black", fill=NA, size=1.3), # border around the plot
text = element_text(size = 15), # change font sizes
legend.title = element_text(size = 10),
legend.text = element_text(size = 10))
Learning_E
Is the assay broadly measuring learning or memory?
mod_E2 <- rma.mv(yi = lnRR_Ea, V = VCV_E, mod = ~Learning_vs_memory-1, random = list(~1|Study_ID,
~1|ES_ID,
~1|Strain),
test = "t",
data = dat)
summary(mod_E2)
##
## Multivariate Meta-Analysis Model (k = 85; method: REML)
##
## logLik Deviance AIC BIC AICc
## -11.6265 23.2529 33.2529 45.3471 34.0322
##
## Variance Components:
##
## estim sqrt nlvls fixed factor
## sigma^2.1 0.0077 0.0877 30 no Study_ID
## sigma^2.2 0.0310 0.1760 85 no ES_ID
## sigma^2.3 0.0000 0.0000 6 no Strain
##
## Test for Residual Heterogeneity:
## QE(df = 83) = 652.9299, p-val < .0001
##
## Test of Moderators (coefficients 1:2):
## F(df1 = 2, df2 = 83) = 21.0340, p-val < .0001
##
## Model Results:
##
## estimate se tval df pval ci.lb
## Learning_vs_memoryLearning 0.2458 0.0475 5.1789 83 <.0001 0.1514
## Learning_vs_memoryMemory 0.1884 0.0360 5.2368 83 <.0001 0.1169
## ci.ub
## Learning_vs_memoryLearning 0.3402 ***
## Learning_vs_memoryMemory 0.2600 ***
##
## ---
## Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
r2_ml(mod_E2)
## R2_marginal R2_coditional
## 0.01866131 1.00000000
LvsM_E<- orchard_plot(mod_E2, mod = "Learning_vs_memory", xlab = "lnRR", alpha=0.4) +
geom_errorbarh(aes(xmin = lowerPR, xmax = upperPR), height = 0, show.legend = FALSE, size = 1.1, alpha = 0.5) + # prediction intervals
geom_errorbarh(aes(xmin = lowerCL, xmax = upperCL), height = 0.05, show.legend = FALSE, size = 2) + # confidence intervals
xlim(-0.5, 2) +
geom_point(aes(fill = name), size = 5, shape = 21)+ # mean estimate
scale_size_continuous(range = c(1, 7))+ # change point scaling
theme(panel.border = element_rect(colour = "black", fill=NA, size=1.3), # border around the plot
text = element_text(size = 15), # change font sizes
legend.title = element_text(size = 10),
legend.text = element_text(size = 10))
LvsM_E
The type of cue used
dat2 <- filter(dat, Type_reinforcement %in% c("Appetitive", "Aversive", "Not applicable"))
VCV_E2 <- impute_covariance_matrix(vi = dat2$lnRRV_E, cluster = dat2$Study_ID, r = 0.5)
mod_E3 <- rma.mv(yi = lnRR_Ea, V = VCV_E2, mod = ~ Type_reinforcement-1, random = list(~1|Study_ID,
~1|ES_ID,
~1|Strain),
test = "t",
data = dat2)
summary(mod_E3)
##
## Multivariate Meta-Analysis Model (k = 92; method: REML)
##
## logLik Deviance AIC BIC AICc
## -18.1008 36.2017 48.2017 63.1335 49.2261
##
## Variance Components:
##
## estim sqrt nlvls fixed factor
## sigma^2.1 0.0061 0.0778 30 no Study_ID
## sigma^2.2 0.0449 0.2119 92 no ES_ID
## sigma^2.3 0.0000 0.0000 6 no Strain
##
## Test for Residual Heterogeneity:
## QE(df = 89) = 780.4926, p-val < .0001
##
## Test of Moderators (coefficients 1:3):
## F(df1 = 3, df2 = 89) = 15.1398, p-val < .0001
##
## Model Results:
##
## estimate se tval df pval ci.lb
## Type_reinforcementAppetitive 0.1949 0.0721 2.7023 89 0.0082 0.0516
## Type_reinforcementAversive 0.2704 0.0439 6.1559 89 <.0001 0.1831
## Type_reinforcementNot applicable 0.1082 0.0600 1.8030 89 0.0748 -0.0110
## ci.ub
## Type_reinforcementAppetitive 0.3382 **
## Type_reinforcementAversive 0.3577 ***
## Type_reinforcementNot applicable 0.2274 .
##
## ---
## Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
r2_ml(mod_E3)
## R2_marginal R2_coditional
## 0.08421309 1.00000000
Reinforcement_E <-orchard_plot(mod_E3, mod = "Type_reinforcement", xlab = "lnRR", alpha=0.4) +
geom_errorbarh(aes(xmin = lowerPR, xmax = upperPR), height = 0, show.legend = FALSE, size = 1.1, alpha = 0.5) + # prediction intervals
geom_errorbarh(aes(xmin = lowerCL, xmax = upperCL), height = 0.05, show.legend = FALSE, size = 2) + # confidence intervals
xlim(-0.5, 2) +
geom_point(aes(fill = name), size = 5, shape = 21)+ # mean estimate
scale_size_continuous(range = c(1, 7))+ # change point scaling
theme(panel.border = element_rect(colour = "black", fill=NA, size=1.3), # border around the plot
text = element_text(size = 15), # change font sizes
legend.title = element_text(size = 10),
legend.text = element_text(size = 10))
Reinforcement_E
Does the form of enrichment include exercise through a running wheel or treadmill?
mod_E5<- rma.mv(yi = lnRR_Ea, V = VCV_E, mod = ~EE_exercise-1, random = list(~1|Study_ID,
~1|ES_ID,
~1|Strain),
test = "t",
data = dat)
summary(mod_E5)
##
## Multivariate Meta-Analysis Model (k = 92; method: REML)
##
## logLik Deviance AIC BIC AICc
## -20.4952 40.9905 50.9905 63.4895 51.7048
##
## Variance Components:
##
## estim sqrt nlvls fixed factor
## sigma^2.1 0.0054 0.0734 30 no Study_ID
## sigma^2.2 0.0487 0.2208 92 no ES_ID
## sigma^2.3 0.0000 0.0000 6 no Strain
##
## Test for Residual Heterogeneity:
## QE(df = 90) = 867.0038, p-val < .0001
##
## Test of Moderators (coefficients 1:2):
## F(df1 = 2, df2 = 90) = 19.4577, p-val < .0001
##
## Model Results:
##
## estimate se tval df pval ci.lb ci.ub
## EE_exerciseExercise 0.2156 0.0421 5.1141 90 <.0001 0.1318 0.2993
## EE_exerciseNo exercise 0.2146 0.0601 3.5723 90 0.0006 0.0952 0.3339
##
## EE_exerciseExercise ***
## EE_exerciseNo exercise ***
##
## ---
## Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
r2_ml(mod_E5)
## R2_marginal R2_coditional
## 4.108541e-06 1.000000e+00
Exercise_E <-orchard_plot(mod_E5, mod = "EE_exercise", xlab = "lnRR", alpha=0.4) +
geom_errorbarh(aes(xmin = lowerPR, xmax = upperPR), height = 0, show.legend = FALSE, size = 1.1, alpha = 0.5) + # prediction intervals
geom_errorbarh(aes(xmin = lowerCL, xmax = upperCL), height = 0.05, show.legend = FALSE, size = 2) + # confidence intervals
xlim(-0.5, 2) +
geom_point(aes(fill = name), size = 5, shape = 21)+ # mean estimate
scale_size_continuous(range = c(1, 7))+ # change point scaling
theme(panel.border = element_rect(colour = "black", fill=NA, size=1.3), # border around the plot
text = element_text(size = 15), # change font sizes
legend.title = element_text(size = 10),
legend.text = element_text(size = 10))
Exercise_E
The age at which the individuals were exposed to environmental enrichment.
dat6 <- filter(dat, Age_EE_exposure %in% c("Adult", "Adolescent"))
VCV_E6 <- impute_covariance_matrix(vi = dat6$lnRRV_E, cluster = dat6$Study_ID, r = 0.5)
mod_E6 <- rma.mv(yi = lnRR_Ea, V = VCV_E6, mod = ~Age_EE_exposure-1, random = list(~1|Study_ID,
~1|ES_ID,
~1|Strain),
test = "t",
data = dat6)
summary(mod_E6)
##
## Multivariate Meta-Analysis Model (k = 88; method: REML)
##
## logLik Deviance AIC BIC AICc
## -17.2225 34.4450 44.4450 56.7168 45.1950
##
## Variance Components:
##
## estim sqrt nlvls fixed factor
## sigma^2.1 0.0047 0.0682 29 no Study_ID
## sigma^2.2 0.0457 0.2137 88 no ES_ID
## sigma^2.3 0.0000 0.0000 6 no Strain
##
## Test for Residual Heterogeneity:
## QE(df = 86) = 834.1449, p-val < .0001
##
## Test of Moderators (coefficients 1:2):
## F(df1 = 2, df2 = 86) = 21.8297, p-val < .0001
##
## Model Results:
##
## estimate se tval df pval ci.lb ci.ub
## Age_EE_exposureAdolescent 0.2115 0.0389 5.4335 86 <.0001 0.1341 0.2889
## Age_EE_exposureAdult 0.2572 0.0684 3.7599 86 0.0003 0.1212 0.3932
##
## Age_EE_exposureAdolescent ***
## Age_EE_exposureAdult ***
##
## ---
## Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
r2_ml(mod_E6)
## R2_marginal R2_coditional
## 0.006503496 0.999999999
Age_E<- orchard_plot(mod_E6, mod = "Age_EE_exposure", xlab = "lnRR", alpha=0.4) +
geom_errorbarh(aes(xmin = lowerPR, xmax = upperPR), height = 0, show.legend = FALSE, size = 1.1, alpha = 0.5) + # prediction intervals
geom_errorbarh(aes(xmin = lowerCL, xmax = upperCL), height = 0.05, show.legend = FALSE, size = 2) + # confidence intervals
xlim(-0.5, 2) +
geom_point(aes(fill = name), size = 5, shape = 21)+ # mean estimate
scale_size_continuous(range = c(1, 7))+ # change point scaling
theme(panel.border = element_rect(colour = "black", fill=NA, size=1.3), # border around the plot
text = element_text(size = 15), # change font sizes
legend.title = element_text(size = 10),
legend.text = element_text(size = 10))
Age_E
# TODO - I am saving the results from dredge and reopening it so that we do not need to redo this again and again (of course we need to do this again if data changes). This will become increasingly important as you do mroe complex models or write a lot of models
# TODO - please do this for other mulit-moderator model sections
# filter data so that all K < 5 are removed
dat_Efm <- dat %>%
filter(Type_learning %in% c("Recognition", "Habituation", "Conditioning"),
Type_reinforcement %in% c("Appetitive", "Aversive", "Not applicable"),
EE_social %in% c("Social", "Non-social"),
Age_EE_exposure %in% c("Adult", "Adolescent"))
VCV_Efm <- impute_covariance_matrix(vi = dat_Efm$lnRRV_E, cluster = dat$Study_ID, r = 0.5)
mod_Efm <- rma.mv(yi = lnRR_Sa, V = VCV_Efm, mod = ~Type_learning-1 + Learning_vs_memory-1 + Type_reinforcement-1 + EE_social-1 + EE_exercise-1 + Age_EE_exposure-1 , random = list(~1|Study_ID,
~1|ES_ID,
~1|Strain),
test = "t",
data = dat_Efm)
#summary(mod_Efm)
#r2_ml(mod_Efm)
res_Efm <- dredge(mod_Efm, trace=2)
saveRDS(res_Efm, file = here("Rdata", "res_Efm.rds"))
# also saving the full model and data
saveRDS(mod_Efm, file = here("Rdata", "mod_Efm.rds"))
saveRDS(dat_Efm, file = here("Rdata", "dat_Efm.rds"))
dat_Efm <- readRDS(file = here("Rdata", "dat_Efm.rds"))
mod_Efm <- readRDS(file = here("Rdata", "mod_Efm.rds"))
res_Efm <- readRDS(file = here("Rdata", "res_Efm.rds"))
res_Efm2<- subset(res_Efm, delta <= 6, recalc.weights=FALSE)
importance(res_Efm2)
## Learning_vs_memory Age_EE_exposure EE_exercise
## Sum of weights: 0.94 0.52 0.24
## N containing models: 20 10 8
## Type_reinforcement EE_social Type_learning
## Sum of weights: 0.22 0.21 0.14
## N containing models: 7 7 6
#TODO - probably - you can save leave1out stuff - it saves still like I did above (this will be a good practice so in the future you can plan more complex RMD files )
# funnel plot
Funnel_E<-funnel(mod_Efm, xlab = "lnRR", ylab = "Standard Error")
Funnel_E
| x | y | slab |
|---|---|---|
| -0.2180678 | 0.2451168 | 1 |
| -0.2278966 | 0.2633163 | 2 |
| -0.1000330 | 0.2262804 | 3 |
| 0.0353113 | 0.2766877 | 4 |
| 0.1314139 | 0.2288277 | 5 |
| 0.1330523 | 0.2153569 | 6 |
| 0.0903243 | 0.2214835 | 7 |
| 0.0759004 | 0.2255201 | 8 |
| -0.1306259 | 0.2777978 | 9 |
| -0.5684462 | 0.3881857 | 10 |
| -0.2097999 | 0.2406746 | 11 |
| 0.2303117 | 0.2866805 | 12 |
| -0.1934260 | 0.2025633 | 13 |
| -0.0295005 | 0.2680132 | 14 |
| 0.4701026 | 0.4132400 | 15 |
| -0.7010039 | 0.4580530 | 16 |
| -0.4466954 | 0.2529154 | 17 |
| -0.3862617 | 0.3370803 | 18 |
| -0.0254412 | 0.2554905 | 19 |
| -0.0758518 | 0.2849109 | 20 |
| 0.4305269 | 0.3022538 | 21 |
| -0.5039716 | 0.3065677 | 22 |
| 0.1809597 | 0.3190312 | 23 |
| -0.3640087 | 0.3325057 | 24 |
| -0.1128512 | 0.2391062 | 25 |
| -0.1187448 | 0.2385328 | 26 |
| -0.3323159 | 0.2223962 | 27 |
| -0.1983974 | 0.2208712 | 28 |
| 0.6032277 | 0.2691223 | 29 |
| 0.7376754 | 0.2620829 | 30 |
| -0.0328092 | 0.2265290 | 31 |
| -0.1268449 | 0.4122017 | 32 |
| 0.4843385 | 0.3145766 | 33 |
| -0.3009145 | 0.4095302 | 34 |
| 0.4239129 | 0.2989404 | 35 |
| -0.1176528 | 0.3116144 | 36 |
| -0.1883185 | 0.3819356 | 37 |
| 0.4522413 | 0.3454615 | 38 |
| 0.0158822 | 0.2364568 | 39 |
| -0.2307761 | 0.2767348 | 40 |
| -0.1076550 | 0.2415720 | 41 |
| -0.4466424 | 0.2827798 | 42 |
| -0.1992745 | 0.3486445 | 43 |
| 0.1448037 | 0.3050888 | 44 |
| 0.0396847 | 0.2826076 | 45 |
| 0.0492229 | 0.2364516 | 46 |
| -0.0555896 | 0.2375046 | 47 |
| -0.1454630 | 0.2460295 | 48 |
| -0.4047892 | 0.2294339 | 49 |
| -0.2762168 | 0.2424023 | 50 |
| -0.2422834 | 0.2405256 | 51 |
| -0.0969267 | 0.2371490 | 52 |
| -0.0093555 | 0.3197495 | 53 |
| -0.0274388 | 0.2459432 | 54 |
| 0.3496685 | 0.2613314 | 59 |
| -0.0832221 | 0.3742881 | 60 |
| 0.1099282 | 0.2590096 | 61 |
| 0.1595046 | 0.2761416 | 62 |
| 0.1331808 | 0.2656607 | 63 |
| 0.1683030 | 0.2626792 | 64 |
| -0.4762477 | 0.2799830 | 65 |
| -0.4066442 | 0.2666661 | 66 |
| 0.1543794 | 0.2009047 | 67 |
| 0.1537392 | 0.2049005 | 68 |
| 0.2328418 | 0.2027690 | 69 |
| 0.2019216 | 0.2032698 | 70 |
| 0.2717095 | 0.2102889 | 71 |
| 0.2325251 | 0.2031445 | 72 |
| 0.1137852 | 0.2179847 | 73 |
| -0.6621682 | 0.3046156 | 74 |
| 0.1932835 | 0.2218578 | 75 |
| -0.3129917 | 0.2820648 | 76 |
| 0.0148914 | 0.2352327 | 77 |
| 0.2932524 | 0.4797465 | 78 |
| -0.1776640 | 0.2499787 | 79 |
| 0.2746818 | 0.5637969 | 80 |
| -0.1388391 | 0.2337616 | 81 |
| 0.3207433 | 0.2353494 | 83 |
| 0.0758546 | 0.2263885 | 84 |
| 0.0157814 | 0.2270626 | 85 |
| -1.3072089 | 0.7115350 | 86 |
| 0.0217425 | 0.2557927 | 87 |
| 0.1107238 | 0.4113318 | 88 |
#year published was scaled previously under stress PB
dat_Efm$sqrt_inv_e_n <- with(dat_Efm, sqrt(1/CC_n + 1/EC_n + 1/ES_n + 1/CS_n))
PB_MR_E<- rma.mv(lnRR_Sa, lnRRV_S, mods = ~1 + sqrt_inv_e_n + Year_published + Type_learning + Type_reinforcement + EE_social + EE_exercise + Age_stress_exposure, random = list(~1|Study_ID,
~1|ES_ID,
~1|Strain), method = "REML", test = "t",
data = dat_Efm)
estimates_PB_MR_E<- estimates.CI(PB_MR_E)
estimates_PB_MR_E
| estimate | mean | lower | upper |
|---|---|---|---|
| intrcpt | -13.9147389 | -59.3987797 | 31.5693018 |
| sqrt_inv_e_n | -0.1711961 | -1.0441687 | 0.7017764 |
| Year_published | 0.0068520 | -0.0157749 | 0.0294790 |
| Type_learningHabituation | -0.5828922 | -1.0088024 | -0.1569819 |
| Type_learningRecognition | -0.1068229 | -0.4651361 | 0.2514902 |
| Type_reinforcementAversive | 0.2077553 | -0.1500255 | 0.5655360 |
| Type_reinforcementNot applicable | 0.3274613 | -0.1646052 | 0.8195278 |
| EE_socialSocial | 0.0540916 | -0.1861944 | 0.2943777 |
| EE_exerciseNo exercise | -0.0193952 | -0.2891022 | 0.2503118 |
| Age_stress_exposureAdult | -0.1438520 | -0.5799393 | 0.2922354 |
| Age_stress_exposureEarly postnatal | -0.0499447 | -0.4906291 | 0.3907397 |
| Age_stress_exposurePrenatal | -0.1335549 | -0.5816002 | 0.3144903 |
#no effect of inv_effective sample size or year published
#leave-one-out analysis
dat$Study_ID <- as.factor(dat$Study_ID)
LeaveOneOut_effectsize <- list()
for(i in 1:length(levels(dat$Study_ID))){
LeaveOneOut_effectsize[[i]] <- rma.mv(yi = lnRR_Ea, V = lnRRV_E,
random = list(~1 | Study_ID,~1| ES_ID, ~1 | Strain),
method = "REML", data = dat[dat$Study_ID
!= levels(dat$Study_ID)[i], ])}
# writing function for extracting est, ci.lb, and ci.ub from all models
est.func <- function(mod_E0){
df <- data.frame(est = mod_E0$b, lower = mod_E0$ci.lb, upper = mod_E0$ci.ub)
return(df)
}
#using dplyr to form data frame
MA_CVR <- lapply(LeaveOneOut_effectsize, function(x) est.func(x))%>% bind_rows %>% mutate(left_out = levels(dat$Study_ID))
#telling ggplot to stop reordering factors
MA_CVR$left_out<- as.factor(MA_CVR$left_out)
MA_CVR$left_out<-factor(MA_CVR$left_out, levels = MA_CVR$left_out)
#plotting
leaveoneout_E <- ggplot(MA_CVR) +
geom_hline(yintercept = 0, lty = 2, lwd = 1) +
geom_hline(yintercept = mod_E0$ci.lb, lty = 3, lwd = 0.75, colour = "black") +
geom_hline(yintercept = mod_E0$b, lty = 1, lwd = 0.75, colour = "black") +
geom_hline(yintercept = mod_E0$ci.ub, lty = 3, lwd = 0.75, colour = "black") +
geom_pointrange(aes(x = left_out, y = est, ymin = lower, ymax = upper)) +
xlab("Study left out") +
ylab("lnRR, 95% CI") +
coord_flip() +
theme(panel.grid.minor = element_blank())+
theme_bw() + theme(panel.grid.major = element_blank()) +
theme(panel.grid.minor.x = element_blank() ) +
theme(axis.text.y = element_text(size = 6))
leaveoneout_E
dat$Study_ID <- as.integer(dat$Study_ID)
Learning and memory are significantly reduced due to stress. High heterogeneity
mod_S0 <- rma.mv(yi = lnRR_Sa, V = VCV_S, random = list(~1|Study_ID,
~1|ES_ID,
~1|Strain),
test = "t",
data = dat)
summary(mod_S0)
##
## Multivariate Meta-Analysis Model (k = 92; method: REML)
##
## logLik Deviance AIC BIC AICc
## -22.6902 45.3804 53.3804 63.4239 53.8456
##
## Variance Components:
##
## estim sqrt nlvls fixed factor
## sigma^2.1 0.0107 0.1036 30 no Study_ID
## sigma^2.2 0.0504 0.2245 92 no ES_ID
## sigma^2.3 0.0000 0.0000 6 no Strain
##
## Test for Heterogeneity:
## Q(df = 91) = 933.9737, p-val < .0001
##
## Model Results:
##
## estimate se tval df pval ci.lb ci.ub
## -0.1155 0.0378 -3.0591 91 0.0029 -0.1906 -0.0405 **
##
## ---
## Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
i2_ml(mod_S0)
## I2_total I2_Study_ID I2_ES_ID I2_Strain
## 9.474395e-01 1.662608e-01 7.811787e-01 2.947034e-10
orchard_plot(mod_S0, mod = "Int", xlab = "lnRR", alpha=0.4) +
geom_errorbarh(aes(xmin = lowerPR, xmax = upperPR), height = 0, show.legend = FALSE, size = 1.1, alpha = 0.5) + # prediction intervals
geom_errorbarh(aes(xmin = lowerCL, xmax = upperCL), height = 0.05, show.legend = FALSE, size = 2) + # confidence intervals
geom_point(aes(fill = name), size = 5, shape = 21)+ # mean estimate
scale_size_continuous(range = c(1, 7))+ # change point scaling
theme(panel.border = element_rect(colour = "black", fill=NA, size=1.3), # border around the plot
text = element_text(size = 24), # change font sizes
legend.title = element_text(size = 15),
legend.text = element_text(size = 13))
###Metaregression
The type of learning/memory response
dat$Type_learning<-as.factor(dat$Type_learning)
VCV_S1 <- impute_covariance_matrix(vi = dat1$lnRRV_S, cluster = dat$Study_ID, r = 0.5)
mod_S1 <- rma.mv(yi = lnRR_Sa, V = VCV_S1, mod = ~Type_learning-1, random = list(~1|Study_ID,
~1|ES_ID,
~1|Strain),
test = "t",
data = dat1)
summary(mod_S1)
##
## Multivariate Meta-Analysis Model (k = 92; method: REML)
##
## logLik Deviance AIC BIC AICc
## -17.3159 34.6317 46.6317 61.5635 47.6561
##
## Variance Components:
##
## estim sqrt nlvls fixed factor
## sigma^2.1 0.0203 0.1423 30 no Study_ID
## sigma^2.2 0.0351 0.1875 92 no ES_ID
## sigma^2.3 0.0000 0.0000 6 no Strain
##
## Test for Residual Heterogeneity:
## QE(df = 89) = 732.1642, p-val < .0001
##
## Test of Moderators (coefficients 1:3):
## F(df1 = 3, df2 = 89) = 7.3668, p-val = 0.0002
##
## Model Results:
##
## estimate se tval df pval ci.lb
## Type_learningConditioning -0.1052 0.0425 -2.4791 89 0.0151 -0.1896
## Type_learningHabituation -0.5273 0.1191 -4.4285 89 <.0001 -0.7639
## Type_learningRecognition -0.0490 0.0718 -0.6819 89 0.4971 -0.1917
## ci.ub
## Type_learningConditioning -0.0209 *
## Type_learningHabituation -0.2907 ***
## Type_learningRecognition 0.0937
##
## ---
## Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
r2_ml(mod_S1)
## R2_marginal R2_coditional
## 0.1974766 1.0000000
Learning_S <-orchard_plot(mod_S1, mod = "Type_learning", xlab = "lnRR", alpha=0.4) +
geom_errorbarh(aes(xmin = lowerPR, xmax = upperPR), height = 0, show.legend = FALSE, size = 1.1, alpha = 0.5) + # prediction intervals
geom_errorbarh(aes(xmin = lowerCL, xmax = upperCL), height = 0.05, show.legend = FALSE, size = 2) + # confidence intervals
geom_point(aes(fill = name), size = 5, shape = 21)+ # mean estimate
scale_size_continuous(range = c(1, 7))+ # change point scaling
theme(panel.border = element_rect(colour = "black", fill=NA, size=1.3), # border around the plot
text = element_text(size = 15), # change font sizes
legend.title = element_text(size = 10),
legend.text = element_text(size = 10))
Learning_S
Is the assay broadly measuring learning or memory?
mod_S2 <- rma.mv(yi = lnRR_Sa, V = VCV_S, mod = ~Learning_vs_memory-1, random = list(~1|Study_ID,
~1|ES_ID,
~1|Strain),
test = "t",
data = dat)
summary(mod_S2)
##
## Multivariate Meta-Analysis Model (k = 85; method: REML)
##
## logLik Deviance AIC BIC AICc
## -11.8032 23.6065 33.6065 45.7007 34.3857
##
## Variance Components:
##
## estim sqrt nlvls fixed factor
## sigma^2.1 0.0261 0.1615 30 no Study_ID
## sigma^2.2 0.0267 0.1635 85 no ES_ID
## sigma^2.3 0.0000 0.0000 6 no Strain
##
## Test for Residual Heterogeneity:
## QE(df = 83) = 676.7747, p-val < .0001
##
## Test of Moderators (coefficients 1:2):
## F(df1 = 2, df2 = 83) = 2.9221, p-val = 0.0594
##
## Model Results:
##
## estimate se tval df pval ci.lb
## Learning_vs_memoryLearning -0.0573 0.0532 -1.0785 83 0.2840 -0.1631
## Learning_vs_memoryMemory -0.1053 0.0436 -2.4135 83 0.0180 -0.1920
## ci.ub
## Learning_vs_memoryLearning 0.0484
## Learning_vs_memoryMemory -0.0185 *
##
## ---
## Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
r2_ml(mod_S2)
## R2_marginal R2_coditional
## 0.009625517 1.000000000
LvsM_S <- orchard_plot(mod_S2, mod = "Learning_vs_memory", xlab = "lnRR", alpha=0.4) +
geom_errorbarh(aes(xmin = lowerPR, xmax = upperPR), height = 0, show.legend = FALSE, size = 1.1, alpha = 0.5) + # prediction intervals
geom_errorbarh(aes(xmin = lowerCL, xmax = upperCL), height = 0.05, show.legend = FALSE, size = 2) + # confidence intervals
geom_point(aes(fill = name), size = 5, shape = 21)+ # mean estimate
scale_size_continuous(range = c(1, 7))+ # change point scaling
theme(panel.border = element_rect(colour = "black", fill=NA, size=1.3), # border around the plot
text = element_text(size = 15), # change font sizes
legend.title = element_text(size = 10),
legend.text = element_text(size = 10))
LvsM_S
The type of cue used
VCV_S2 <- VCV_E <- impute_covariance_matrix(vi = dat2$lnRRV_S, cluster = dat$Study_ID, r = 0.5)
mod_S3 <- rma.mv(yi = lnRR_Sa, V = VCV_S2, mod = ~ Type_reinforcement-1, random = list(~1|Study_ID,
~1|ES_ID,
~1|Strain),
test = "t",
data = dat2)
summary(mod_S3)
##
## Multivariate Meta-Analysis Model (k = 92; method: REML)
##
## logLik Deviance AIC BIC AICc
## -22.1655 44.3311 56.3311 71.2629 57.3555
##
## Variance Components:
##
## estim sqrt nlvls fixed factor
## sigma^2.1 0.0126 0.1124 30 no Study_ID
## sigma^2.2 0.0506 0.2248 92 no ES_ID
## sigma^2.3 0.0000 0.0000 6 no Strain
##
## Test for Residual Heterogeneity:
## QE(df = 89) = 911.1858, p-val < .0001
##
## Test of Moderators (coefficients 1:3):
## F(df1 = 3, df2 = 89) = 3.6161, p-val = 0.0162
##
## Model Results:
##
## estimate se tval df pval
## Type_reinforcementAppetitive -0.1980 0.0836 -2.3685 89 0.0200
## Type_reinforcementAversive -0.0747 0.0489 -1.5264 89 0.1305
## Type_reinforcementNot applicable -0.1386 0.0660 -2.0995 89 0.0386
## ci.lb ci.ub
## Type_reinforcementAppetitive -0.3640 -0.0319 *
## Type_reinforcementAversive -0.1719 0.0225
## Type_reinforcementNot applicable -0.2698 -0.0074 *
##
## ---
## Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
r2_ml(mod_S3)
## R2_marginal R2_coditional
## 0.03676953 1.00000000
Reinforcement_S <-orchard_plot(mod_S3, mod = "Type_reinforcement", xlab = "lnRR", alpha=0.4) +
geom_errorbarh(aes(xmin = lowerPR, xmax = upperPR), height = 0, show.legend = FALSE, size = 1.1, alpha = 0.5) + # prediction intervals
geom_errorbarh(aes(xmin = lowerCL, xmax = upperCL), height = 0.05, show.legend = FALSE, size = 2) + # confidence intervals
geom_point(aes(fill = name), size = 5, shape = 21)+ # mean estimate
scale_size_continuous(range = c(1, 7))+ # change point scaling
theme(panel.border = element_rect(colour = "black", fill=NA, size=1.3), # border around the plot
text = element_text(size = 15), # change font sizes
legend.title = element_text(size = 10),
legend.text = element_text(size = 10))
Reinforcement_S
The type of stress manipulation
dat3 <- filter(dat, Type_stress_exposure %in% c("Restraint", "Noise", "MS", "Combination"))
VCV_S3 <- impute_covariance_matrix(vi = dat3$lnRRV_S, cluster = dat$Study_ID, r = 0.5)
mod_S4 <- rma.mv(yi = lnRR_Sa, V = VCV_S3, mod = ~Type_stress_exposure-1, random = list(~1|Study_ID,
~1|ES_ID,
~1|Strain),
test = "t",
data = dat3)
summary(mod_S4)
##
## Multivariate Meta-Analysis Model (k = 85; method: REML)
##
## logLik Deviance AIC BIC AICc
## -21.3621 42.7241 56.7241 73.4853 58.2584
##
## Variance Components:
##
## estim sqrt nlvls fixed factor
## sigma^2.1 0.0236 0.1537 25 no Study_ID
## sigma^2.2 0.0527 0.2295 85 no ES_ID
## sigma^2.3 0.0000 0.0000 4 no Strain
##
## Test for Residual Heterogeneity:
## QE(df = 81) = 1030.5305, p-val < .0001
##
## Test of Moderators (coefficients 1:4):
## F(df1 = 4, df2 = 81) = 2.0257, p-val = 0.0985
##
## Model Results:
##
## estimate se tval df pval ci.lb
## Type_stress_exposureCombination -0.0558 0.1132 -0.4927 81 0.6236 -0.2811
## Type_stress_exposureMS -0.0607 0.0691 -0.8780 81 0.3825 -0.1982
## Type_stress_exposureNoise -0.1468 0.1248 -1.1765 81 0.2428 -0.3950
## Type_stress_exposureRestraint -0.2175 0.0878 -2.4764 81 0.0154 -0.3922
## ci.ub
## Type_stress_exposureCombination 0.1695
## Type_stress_exposureMS 0.0768
## Type_stress_exposureNoise 0.1015
## Type_stress_exposureRestraint -0.0427 *
##
## ---
## Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
r2_ml(mod_S4)
## R2_marginal R2_coditional
## 0.05175205 1.00000000
Stressor<- orchard_plot(mod_S4, mod = "Type_stress_exposure", xlab = "lnRR", alpha=0.4) +
geom_errorbarh(aes(xmin = lowerPR, xmax = upperPR), height = 0, show.legend = FALSE, size = 1.1, alpha = 0.5) + # prediction intervals
geom_errorbarh(aes(xmin = lowerCL, xmax = upperCL), height = 0.05, show.legend = FALSE, size = 2) + # confidence intervals
geom_point(aes(fill = name), size = 5, shape = 21)+ # mean estimate
scale_size_continuous(range = c(1, 7))+ # change point scaling
theme(panel.border = element_rect(colour = "black", fill=NA, size=1.3), # border around the plot
text = element_text(size = 15), # change font sizes
legend.title = element_text(size = 10),
legend.text = element_text(size = 10))
Stressor
VCV_S3a <- impute_covariance_matrix(vi = dat$lnRRV_S, cluster = dat$Study_ID, r = 0.5)
mod_S5 <-rma.mv(yi = lnRR_Sa, V = VCV_S3a, mod = ~Age_stress_exposure-1, random = list(~1|Study_ID,
~1|ES_ID,
~1|Strain),
test = "t",
data = dat)
summary(mod_S5)
##
## Multivariate Meta-Analysis Model (k = 92; method: REML)
##
## logLik Deviance AIC BIC AICc
## -20.8850 41.7699 55.7699 73.1113 57.1699
##
## Variance Components:
##
## estim sqrt nlvls fixed factor
## sigma^2.1 0.0021 0.0455 30 no Study_ID
## sigma^2.2 0.0531 0.2304 92 no ES_ID
## sigma^2.3 0.0000 0.0000 6 no Strain
##
## Test for Residual Heterogeneity:
## QE(df = 88) = 830.4810, p-val < .0001
##
## Test of Moderators (coefficients 1:4):
## F(df1 = 4, df2 = 88) = 4.4779, p-val = 0.0024
##
## Model Results:
##
## estimate se tval df pval
## Age_stress_exposureAdolescent 0.0244 0.1336 0.1825 88 0.8556
## Age_stress_exposureAdult -0.2481 0.0693 -3.5790 88 0.0006
## Age_stress_exposureEarly postnatal -0.0645 0.0461 -1.3997 88 0.1651
## Age_stress_exposurePrenatal -0.1379 0.0782 -1.7634 88 0.0813
## ci.lb ci.ub
## Age_stress_exposureAdolescent -0.2411 0.2899
## Age_stress_exposureAdult -0.3859 -0.1104 ***
## Age_stress_exposureEarly postnatal -0.1561 0.0271
## Age_stress_exposurePrenatal -0.2932 0.0175 .
##
## ---
## Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
r2_ml(mod_S5)
## R2_marginal R2_coditional
## 0.0971388 1.0000000
Age_S <- orchard_plot(mod_S5, mod = "Age_stress_exposure", xlab = "lnRR", alpha=0.4) +
geom_errorbarh(aes(xmin = lowerPR, xmax = upperPR), height = 0, show.legend = FALSE, size = 1.1, alpha = 0.5) + # prediction intervals
geom_errorbarh(aes(xmin = lowerCL, xmax = upperCL), height = 0.05, show.legend = FALSE, size = 2) + # confidence intervals
geom_point(aes(fill = name), size = 5, shape = 21)+ # mean estimate
scale_size_continuous(range = c(1, 7))+ # change point scaling
theme(panel.border = element_rect(colour = "black", fill=NA, size=1.3), # border around the plot
text = element_text(size = 15), # change font sizes
legend.title = element_text(size = 10),
legend.text = element_text(size = 10))
Age_S
How long was the stress applied for (chronic = every day for 7 days or more)? This has the highest marginal R2
dat4 <- filter(dat, Stress_duration %in% c("Chronic", "Acute"))
VCV_S4 <- impute_covariance_matrix(vi = dat4$lnRRV_S, cluster = dat$Study_ID, r = 0.5)
mod_S6 <-rma.mv(yi = lnRR_Sa, V = VCV_S4, mod = ~Stress_duration-1, random = list(~1|Study_ID,
~1|ES_ID,
~1|Strain),
test = "t",
data = dat4)
summary(mod_S6)
##
## Multivariate Meta-Analysis Model (k = 89; method: REML)
##
## logLik Deviance AIC BIC AICc
## -23.6341 47.2682 57.2682 69.5978 58.0090
##
## Variance Components:
##
## estim sqrt nlvls fixed factor
## sigma^2.1 0.0173 0.1314 29 no Study_ID
## sigma^2.2 0.0514 0.2267 89 no ES_ID
## sigma^2.3 0.0000 0.0000 6 no Strain
##
## Test for Residual Heterogeneity:
## QE(df = 87) = 1164.1990, p-val < .0001
##
## Test of Moderators (coefficients 1:2):
## F(df1 = 2, df2 = 87) = 4.9979, p-val = 0.0088
##
## Model Results:
##
## estimate se tval df pval ci.lb ci.ub
## Stress_durationAcute 0.0025 0.0866 0.0292 87 0.9768 -0.1695 0.1746
## Stress_durationChronic -0.1509 0.0477 -3.1593 87 0.0022 -0.2458 -0.0559
##
## Stress_durationAcute
## Stress_durationChronic **
##
## ---
## Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
r2_ml(mod_S6)
## R2_marginal R2_coditional
## 0.05879435 1.00000000
Duration_S <- orchard_plot(mod_S6, mod = "Stress_duration", xlab = "lnRR", alpha=0.4) +
geom_errorbarh(aes(xmin = lowerPR, xmax = upperPR), height = 0, show.legend = FALSE, size = 1.1, alpha = 0.5) + # prediction intervals
geom_errorbarh(aes(xmin = lowerCL, xmax = upperCL), height = 0.05, show.legend = FALSE, size = 2) + # confidence intervals
geom_point(aes(fill = name), size = 5, shape = 21)+ # mean estimate
scale_size_continuous(range = c(1, 7))+ # change point scaling
theme(panel.border = element_rect(colour = "black", fill=NA, size=1.3), # border around the plot
text = element_text(size = 15), # change font sizes
legend.title = element_text(size = 10),
legend.text = element_text(size = 10))
Duration_S
#selecting moderator levels with k >=5
dat_Sfm <- dat %>%
filter(Type_learning %in% c("Recognition", "Habituation", "Conditioning"),
Type_reinforcement %in% c("Appetitive", "Aversive", "Not applicable"),
Type_stress_exposure %in% c("Restraint", "Noise", "MS", "Combination"),
Stress_duration %in% c("Chronic", "Acute"))
VCV_Sfm <- impute_covariance_matrix(vi = dat_Sfm$lnRRV_E, cluster = dat$Study_ID, r = 0.5)
mod_Sfm <- rma.mv(yi = lnRR_Sa, V = VCV_Sfm, mod = ~ Type_learning -1 + Learning_vs_memory + Type_reinforcement + Type_stress_exposure + Age_stress_exposure + Stress_duration, random = list(~1|Study_ID,
~1|ES_ID,
~1|Strain),
test = "t",
data = dat_Sfm)
#summary(mod_Sfm)
#r2_ml(mod_Sfm)
res_Sfm <- dredge(mod_Sfm, trace=2)
##
|
| | 0%
|
|= | 2%
|
|== | 3%
|
|=== | 5%
|
|==== | 6%
|
|===== | 8%
|
|======= | 9%
|
|======== | 11%
|
|========= | 12%
|
|========== | 14%
|
|=========== | 16%
|
|============ | 17%
|
|============= | 19%
|
|============== | 20%
|
|=============== | 22%
|
|================ | 23%
|
|================== | 25%
|
|=================== | 27%
|
|==================== | 28%
|
|===================== | 30%
|
|====================== | 31%
|
|======================= | 33%
|
|======================== | 34%
|
|========================= | 36%
|
|========================== | 38%
|
|=========================== | 39%
|
|============================ | 41%
|
|============================== | 42%
|
|=============================== | 44%
|
|================================ | 45%
|
|================================= | 47%
|
|================================== | 48%
|
|=================================== | 50%
|
|==================================== | 52%
|
|===================================== | 53%
|
|====================================== | 55%
|
|======================================= | 56%
|
|======================================== | 58%
|
|========================================== | 59%
|
|=========================================== | 61%
|
|============================================ | 62%
|
|============================================= | 64%
|
|============================================== | 66%
|
|=============================================== | 67%
|
|================================================ | 69%
|
|================================================= | 70%
|
|================================================== | 72%
|
|=================================================== | 73%
|
|==================================================== | 75%
|
|====================================================== | 77%
|
|======================================================= | 78%
|
|======================================================== | 80%
|
|========================================================= | 81%
|
|========================================================== | 83%
|
|=========================================================== | 84%
|
|============================================================ | 86%
|
|============================================================= | 88%
|
|============================================================== | 89%
|
|=============================================================== | 91%
|
|================================================================= | 92%
|
|================================================================== | 94%
|
|=================================================================== | 95%
|
|==================================================================== | 97%
|
|===================================================================== | 98%
res_Sfm2<- subset(res_Sfm, delta <= 6, recalc.weights=FALSE)
importance(res_Sfm2)
## Learning_vs_memory Stress_duration Type_learning
## Sum of weights: 0.95 0.95 0.20
## N containing models: 8 8 4
## Age_stress_exposure Type_stress_exposure
## Sum of weights: 0.16 0.14
## N containing models: 2 2
## Type_reinforcement
## Sum of weights: 0.14
## N containing models: 2
# funnel plot
Funnel_S <- funnel(mod_Sfm, xlab = "lnRR", ylab = "Standard Error")
Funnel_S
| x | y | slab |
|---|---|---|
| -0.0063448 | 0.2523234 | 1 |
| 0.1522975 | 0.1052420 | 2 |
| 0.1051174 | 0.1448064 | 3 |
| 0.0906935 | 0.1509082 | 4 |
| -0.2627846 | 0.2160459 | 5 |
| 0.1773270 | 0.2663405 | 6 |
| -0.1604204 | 0.1316077 | 7 |
| 0.0035051 | 0.2193621 | 8 |
| 0.4529839 | 0.4034423 | 9 |
| -0.7181227 | 0.4492337 | 10 |
| -0.4941424 | 0.2331461 | 11 |
| -0.4337087 | 0.3225121 | 12 |
| -0.0728881 | 0.2359370 | 13 |
| -0.0134077 | 0.2618911 | 14 |
| 0.4929710 | 0.2806601 | 15 |
| -0.4111992 | 0.2869996 | 16 |
| 0.1393037 | 0.2981475 | 17 |
| -0.4056647 | 0.3125237 | 18 |
| -0.1241790 | 0.2132125 | 19 |
| -0.1300726 | 0.2125692 | 20 |
| 0.4922309 | 0.7894096 | 23 |
| -0.0438840 | 0.2349226 | 24 |
| -0.2337526 | 0.1942307 | 25 |
| -0.0998341 | 0.1924826 | 26 |
| 0.2030273 | 0.2112175 | 27 |
| 0.3678032 | 0.2057938 | 28 |
| -0.1069992 | 0.3931487 | 29 |
| 0.5041841 | 0.2891610 | 30 |
| -0.2810688 | 0.3903469 | 31 |
| 0.4740868 | 0.2728132 | 32 |
| -0.0674789 | 0.2866449 | 33 |
| -0.1381446 | 0.3618516 | 34 |
| 0.5024152 | 0.3231182 | 35 |
| 0.0301901 | 0.1996970 | 36 |
| -0.2109305 | 0.2474627 | 37 |
| -0.0574811 | 0.2083717 | 38 |
| -0.3964685 | 0.2550023 | 39 |
| -0.2780240 | 0.3372247 | 40 |
| 0.0660542 | 0.2919704 | 41 |
| 0.0595304 | 0.2540132 | 42 |
| 0.0993968 | 0.2024132 | 43 |
| 0.2344615 | 0.1633627 | 44 |
| 0.1749164 | 0.1689548 | 45 |
| -0.1506041 | 0.1720813 | 46 |
| 0.0082966 | 0.1826094 | 47 |
| -0.2224378 | 0.2061753 | 48 |
| -0.0467528 | 0.2032274 | 49 |
| -0.0080321 | 0.2298977 | 50 |
| 0.0042128 | 0.1075886 | 51 |
| 0.1341868 | 0.1922788 | 56 |
| -0.2987038 | 0.3298009 | 57 |
| -0.1055534 | 0.1891111 | 58 |
| -0.0559771 | 0.2119698 | 59 |
| -0.2607552 | 0.1924699 | 60 |
| -0.2256330 | 0.1883333 | 61 |
| -0.4754344 | 0.2592312 | 62 |
| -0.3755026 | 0.2455050 | 63 |
| 0.0510926 | 0.1851194 | 64 |
| 0.0504524 | 0.1894484 | 65 |
| 0.1598832 | 0.1886208 | 66 |
| 0.1289630 | 0.1891591 | 67 |
| 0.1987509 | 0.1966824 | 68 |
| 0.1595665 | 0.1890244 | 69 |
| 0.1145986 | 0.1905981 | 70 |
| -0.6310266 | 0.2862736 | 71 |
| 0.2244251 | 0.1959152 | 72 |
| -0.2818501 | 0.2621497 | 73 |
| 0.0291993 | 0.1982460 | 74 |
| 0.3075603 | 0.4627351 | 75 |
| -0.1274901 | 0.2180621 | 76 |
| 0.3248557 | 0.5503896 | 77 |
| -0.3169703 | 0.1540782 | 78 |
| 0.1481498 | 0.1595204 | 80 |
| 0.0766680 | 0.2001551 | 81 |
| 0.0469230 | 0.2017903 | 82 |
#calculating inv effective sample size for use in full meta-regression
dat_Sfm$sqrt_inv_e_n <- with(dat_Sfm, sqrt(1/CC_n + 1/EC_n + 1/ES_n + 1/CS_n))
#time lag bias and eggers regression
dat_Sfm$c_Year_published <- as.vector(scale(dat_Sfm$Year_published, scale = F))
PB_MR_S<- rma.mv(lnRR_Sa, lnRRV_S, mods = ~1 + sqrt_inv_e_n + c_Year_published + Type_learning + Type_reinforcement + Type_stress_exposure + Age_stress_exposure, random = list(~1|Study_ID,
~1|ES_ID,
~1|Strain),
method = "REML",
test = "t",
data = dat_Sfm,
control=list(optimizer="optim", optmethod="Nelder-Mead"))
estimates_PB_MR_S<- estimates.CI(PB_MR_S)
estimates_PB_MR_S
| estimate | mean | lower | upper |
|---|---|---|---|
| intrcpt | -0.0534910 | -0.9384247 | 0.8314427 |
| sqrt_inv_e_n | 0.1476637 | -1.2155241 | 1.5108514 |
| c_Year_published | 0.0055548 | -0.0243577 | 0.0354673 |
| Type_learningHabituation | -0.6361107 | -1.0644764 | -0.2077451 |
| Type_learningRecognition | -0.1068719 | -0.4754180 | 0.2616742 |
| Type_reinforcementAversive | 0.1220611 | -0.1344910 | 0.3786133 |
| Type_reinforcementNot applicable | 0.2373222 | -0.1952699 | 0.6699142 |
| Type_stress_exposureMS | -0.0009972 | -0.8170353 | 0.8150410 |
| Type_stress_exposureNoise | 0.1814221 | -0.8280961 | 1.1909402 |
| Type_stress_exposureRestraint | -0.1312725 | -0.5964008 | 0.3338558 |
| Age_stress_exposureAdult | -0.2037280 | -0.6376774 | 0.2302214 |
| Age_stress_exposureEarly postnatal | -0.2129606 | -1.0867878 | 0.6608667 |
| Age_stress_exposurePrenatal | -0.1999017 | -0.7352586 | 0.3354553 |
#no effect of inv_effective sample size or year published
#leave-one-out analysis
dat$Study_ID <- as.factor(dat$Study_ID)
LeaveOneOut_effectsize <- list()
for(i in 1:length(levels(dat$Study_ID))){
LeaveOneOut_effectsize[[i]] <- rma.mv(yi = lnRR_Sa, V = lnRRV_S,
random = list(~1 | Study_ID,~1| ES_ID, ~1 | Strain),
method = "REML", data = dat[dat$Study_ID
!= levels(dat$Study_ID)[i], ])}
# writing function for extracting est, ci.lb, and ci.ub from all models
est.func <- function(mod_E0){
df <- data.frame(est = mod_E0$b, lower = mod_E0$ci.lb, upper = mod_E0$ci.ub)
return(df)
}
#using dplyr to form data frame
MA_CVR <- lapply(LeaveOneOut_effectsize, function(x) est.func(x))%>% bind_rows %>% mutate(left_out = levels(dat$Study_ID))
#telling ggplot to stop reordering factors
MA_CVR$left_out<- as.factor(MA_CVR$left_out)
MA_CVR$left_out<-factor(MA_CVR$left_out, levels = MA_CVR$left_out)
#plotting
leaveoneout_S <- ggplot(MA_CVR) +
geom_hline(yintercept = 0, lty = 2, lwd = 1) +
geom_hline(yintercept = mod_S0$ci.lb, lty = 3, lwd = 0.75, colour = "black") +
geom_hline(yintercept = mod_S0$b, lty = 1, lwd = 0.75, colour = "black") +
geom_hline(yintercept = mod_S0$ci.ub, lty = 3, lwd = 0.75, colour = "black") +
geom_pointrange(aes(x = left_out, y = est, ymin = lower, ymax = upper)) +
xlab("Study left out") +
ylab("lnRR, 95% CI") +
coord_flip() +
theme(panel.grid.minor = element_blank())+
theme_bw() + theme(panel.grid.major = element_blank()) +
theme(panel.grid.minor.x = element_blank() ) +
theme(axis.text.y = element_text(size = 6))
leaveoneout_S
dat$Study_ID <- as.integer(dat$Study_ID)
Enriched and stress animals are better at learning and memory.
mod_ES0 <- rma.mv(yi = lnRR_ESa, V = VCV_ES, random = list(~1|Study_ID,
~1|ES_ID,
~1|Strain),
test = "t",
data = dat)
summary(mod_ES0)
##
## Multivariate Meta-Analysis Model (k = 92; method: REML)
##
## logLik Deviance AIC BIC AICc
## -50.9607 101.9214 109.9214 119.9648 110.3865
##
## Variance Components:
##
## estim sqrt nlvls fixed factor
## sigma^2.1 0.0279 0.1669 30 no Study_ID
## sigma^2.2 0.0311 0.1765 92 no ES_ID
## sigma^2.3 0.0048 0.0690 6 no Strain
##
## Test for Heterogeneity:
## Q(df = 91) = 307.1213, p-val < .0001
##
## Model Results:
##
## estimate se tval df pval ci.lb ci.ub
## 0.1578 0.0678 2.3273 91 0.0222 0.0231 0.2925 *
##
## ---
## Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
i2_ml(mod_ES0)
## I2_total I2_Study_ID I2_ES_ID I2_Strain
## 0.82109711 0.35875892 0.40100786 0.06133033
orchard_plot(mod_ES0, mod = "Int", xlab = "lnRR", alpha=0.4) +
geom_errorbarh(aes(xmin = lowerPR, xmax = upperPR), height = 0, show.legend = FALSE, size = 1.1, alpha = 0.5) + # prediction intervals
geom_errorbarh(aes(xmin = lowerCL, xmax = upperCL), height = 0.05, show.legend = FALSE, size = 2) + # confidence intervals
geom_point(aes(fill = name), size = 5, shape = 21)+ # mean estimate
scale_size_continuous(range = c(1, 7))+ # change point scaling
theme(panel.border = element_rect(colour = "black", fill=NA, size=1.3), # border around the plot
text = element_text(size = 24), # change font sizes
legend.title = element_text(size = 15),
legend.text = element_text(size = 13))
dat_outliers <- dat %>%
filter(ES_ID != 88)
VCV_ES_outliers <- impute_covariance_matrix(vi = dat_outliers$lnRRV_E, cluster = dat$Study_ID, r = 0.5)
mod_ESoutlier <- rma.mv(yi = lnRR_ESa, V = VCV_ES_outliers, random = list(~1|Study_ID,
~1|ES_ID,
~1|Strain),
test = "t",
data = dat_outliers)
summary(mod_ES0)
##
## Multivariate Meta-Analysis Model (k = 92; method: REML)
##
## logLik Deviance AIC BIC AICc
## -50.9607 101.9214 109.9214 119.9648 110.3865
##
## Variance Components:
##
## estim sqrt nlvls fixed factor
## sigma^2.1 0.0279 0.1669 30 no Study_ID
## sigma^2.2 0.0311 0.1765 92 no ES_ID
## sigma^2.3 0.0048 0.0690 6 no Strain
##
## Test for Heterogeneity:
## Q(df = 91) = 307.1213, p-val < .0001
##
## Model Results:
##
## estimate se tval df pval ci.lb ci.ub
## 0.1578 0.0678 2.3273 91 0.0222 0.0231 0.2925 *
##
## ---
## Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
orchard_plot(mod_ESoutlier, mod = "Int", xlab = "lnRR", alpha=0.4) +
geom_errorbarh(aes(xmin = lowerPR, xmax = upperPR), height = 0, show.legend = FALSE, size = 1.1, alpha = 0.5) + # prediction intervals
geom_errorbarh(aes(xmin = lowerCL, xmax = upperCL), height = 0.05, show.legend = FALSE, size = 2) + # confidence intervals
geom_point(aes(fill = name), size = 5, shape = 21)+ # mean estimate
scale_size_continuous(range = c(1, 7))+ # change point scaling
theme(panel.border = element_rect(colour = "black", fill=NA, size=1.3), # border around the plot
text = element_text(size = 24), # change font sizes
legend.title = element_text(size = 15),
legend.text = element_text(size = 13))
The type of learning/memory response
VCV_ES1 <- impute_covariance_matrix(vi = dat1$lnRRV_ES, cluster = dat$Study_ID, r = 0.5)
mod_ES1 <- rma.mv(yi = lnRR_ESa, V = VCV_ES1, mod = ~Type_learning-1, random = list(~1|Study_ID,
~1|ES_ID,
~1|Strain),
test = "t",
data = dat1)
summary(mod_ES1)
##
## Multivariate Meta-Analysis Model (k = 92; method: REML)
##
## logLik Deviance AIC BIC AICc
## -48.9339 97.8678 109.8678 124.7996 110.8921
##
## Variance Components:
##
## estim sqrt nlvls fixed factor
## sigma^2.1 0.0344 0.1854 30 no Study_ID
## sigma^2.2 0.0246 0.1569 92 no ES_ID
## sigma^2.3 0.0040 0.0633 6 no Strain
##
## Test for Residual Heterogeneity:
## QE(df = 89) = 293.7418, p-val < .0001
##
## Test of Moderators (coefficients 1:3):
## F(df1 = 3, df2 = 89) = 3.5124, p-val = 0.0184
##
## Model Results:
##
## estimate se tval df pval ci.lb
## Type_learningConditioning 0.1900 0.0696 2.7284 89 0.0077 0.0516
## Type_learningHabituation 0.3107 0.1558 1.9946 89 0.0491 0.0012
## Type_learningRecognition 0.0256 0.0896 0.2852 89 0.7761 -0.1525
## ci.ub
## Type_learningConditioning 0.3284 **
## Type_learningHabituation 0.6202 *
## Type_learningRecognition 0.2036
##
## ---
## Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
r2_ml(mod_ES1)
## R2_marginal R2_coditional
## 0.07392779 0.94099939
Learning_ES <- orchard_plot(mod_ES1, mod = "Type_learning", xlab = "lnRR", alpha=0.4) +
geom_errorbarh(aes(xmin = lowerPR, xmax = upperPR), height = 0, show.legend = FALSE, size = 1.1, alpha = 0.5) + # prediction intervals
geom_errorbarh(aes(xmin = lowerCL, xmax = upperCL), height = 0.05, show.legend = FALSE, size = 2) + # confidence intervals
geom_point(aes(fill = name), size = 3, shape = 21)+ # mean estimate
scale_size_continuous(range = c(1, 7))+ # change point scaling
theme(panel.border = element_rect(colour = "black", fill=NA, size=1.3), # border around the plot
axis.text.x = element_text(size = 10), # change font sizes
axis.text.y = element_text(size = 10),
legend.title = element_text(size = 7),
legend.text = element_text(size = 7))
Learning_ES
Is the assay broadly measuring learning or memory?
mod_ES2 <- rma.mv(yi = lnRR_ESa, V = VCV_ES, mod = ~Learning_vs_memory-1, random = list(~1|Study_ID,
~1|ES_ID,
~1|Strain),
test = "t",
data = dat)
summary(mod_ES2)
##
## Multivariate Meta-Analysis Model (k = 85; method: REML)
##
## logLik Deviance AIC BIC AICc
## -49.8593 99.7187 109.7187 121.8129 110.4979
##
## Variance Components:
##
## estim sqrt nlvls fixed factor
## sigma^2.1 0.0282 0.1678 30 no Study_ID
## sigma^2.2 0.0338 0.1837 85 no ES_ID
## sigma^2.3 0.0052 0.0721 6 no Strain
##
## Test for Residual Heterogeneity:
## QE(df = 83) = 302.1628, p-val < .0001
##
## Test of Moderators (coefficients 1:2):
## F(df1 = 2, df2 = 83) = 3.0108, p-val = 0.0547
##
## Model Results:
##
## estimate se tval df pval ci.lb
## Learning_vs_memoryLearning 0.2044 0.0845 2.4175 83 0.0178 0.0362
## Learning_vs_memoryMemory 0.1379 0.0719 1.9174 83 0.0586 -0.0051
## ci.ub
## Learning_vs_memoryLearning 0.3725 *
## Learning_vs_memoryMemory 0.2810 .
##
## ---
## Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
r2_ml(mod_ES2)
## R2_marginal R2_coditional
## 0.01448953 0.92362134
LvsM_ES <- orchard_plot(mod_ES2, mod = "Learning_vs_memory", xlab = "lnRR", alpha=0.4) +
geom_errorbarh(aes(xmin = lowerPR, xmax = upperPR), height = 0, show.legend = FALSE, size = 1.1, alpha = 0.5) + # prediction intervals
geom_errorbarh(aes(xmin = lowerCL, xmax = upperCL), height = 0.05, show.legend = FALSE, size = 2) + # confidence intervals
geom_point(aes(fill = name), size = 3, shape = 21)+ # mean estimate
scale_size_continuous(range = c(1, 7))+ # change point scaling
theme(panel.border = element_rect(colour = "black", fill=NA, size=1.3), # border around the plot
axis.text.x = element_text(size = 10), # change font sizes
axis.text.y = element_text(size = 10),
legend.title = element_text(size = 7),
legend.text = element_text(size = 7))
LvsM_ES
The type of conditioning used
VCV_ES2 <- impute_covariance_matrix(vi = dat2$lnRRV_ES, cluster = dat2$Study_ID, r = 0.5)
mod_ES3 <- rma.mv(yi = lnRR_ESa, V = VCV_ES2, mod = ~ Type_reinforcement-1, random = list(~1|Study_ID,
~1|ES_ID,
~1|Strain),
test = "t",
data = dat2)
summary(mod_ES3)
##
## Multivariate Meta-Analysis Model (k = 92; method: REML)
##
## logLik Deviance AIC BIC AICc
## -49.6471 99.2943 111.2943 126.2261 112.3186
##
## Variance Components:
##
## estim sqrt nlvls fixed factor
## sigma^2.1 0.0335 0.1831 30 no Study_ID
## sigma^2.2 0.0275 0.1658 92 no ES_ID
## sigma^2.3 0.0014 0.0372 6 no Strain
##
## Test for Residual Heterogeneity:
## QE(df = 89) = 288.3178, p-val < .0001
##
## Test of Moderators (coefficients 1:3):
## F(df1 = 3, df2 = 89) = 3.2144, p-val = 0.0267
##
## Model Results:
##
## estimate se tval df pval ci.lb
## Type_reinforcementAppetitive 0.1482 0.1156 1.2825 89 0.2030 -0.0814
## Type_reinforcementAversive 0.1909 0.0667 2.8635 89 0.0052 0.0584
## Type_reinforcementNot applicable 0.0578 0.0798 0.7247 89 0.4705 -0.1007
## ci.ub
## Type_reinforcementAppetitive 0.3779
## Type_reinforcementAversive 0.3234 **
## Type_reinforcementNot applicable 0.2163
##
## ---
## Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
r2_ml(mod_ES3)
## R2_marginal R2_coditional
## 0.04711742 0.97883321
Reinforcement_ES <- orchard_plot(mod_ES3, mod = "Type_reinforcement", xlab = "lnRR", alpha=0.4) +
geom_errorbarh(aes(xmin = lowerPR, xmax = upperPR), height = 0, show.legend = FALSE, size = 1.1, alpha = 0.5) + # prediction intervals
geom_errorbarh(aes(xmin = lowerCL, xmax = upperCL), height = 0.05, show.legend = FALSE, size = 2) + # confidence intervals
geom_point(aes(fill = name), size = 3, shape = 21)+ # mean estimate
scale_size_continuous(range = c(1, 7))+ # change point scaling
theme(panel.border = element_rect(colour = "black", fill=NA, size=1.3), # border around the plot
axis.text.x = element_text(size = 10), # change font sizes
axis.text.y = element_text(size = 10),
legend.title = element_text(size = 7),
legend.text = element_text(size = 7))
Reinforcement_ES
The type of stress manipulation
VCV_ES3 <- impute_covariance_matrix(vi = dat3$lnRRV_ES, cluster = dat$Study_ID, r = 0.5)
mod_ES4 <- rma.mv(yi = lnRR_ESa, V = VCV_ES3, mod = ~Type_stress_exposure-1, random = list(~1|Study_ID,
~1|ES_ID,
~1|Strain),
test = "t",
data = dat3)
summary(mod_ES4)
##
## Multivariate Meta-Analysis Model (k = 85; method: REML)
##
## logLik Deviance AIC BIC AICc
## -45.8298 91.6596 105.6596 122.4207 107.1939
##
## Variance Components:
##
## estim sqrt nlvls fixed factor
## sigma^2.1 0.0559 0.2365 25 no Study_ID
## sigma^2.2 0.0320 0.1788 85 no ES_ID
## sigma^2.3 0.0221 0.1487 4 no Strain
##
## Test for Residual Heterogeneity:
## QE(df = 81) = 297.6210, p-val < .0001
##
## Test of Moderators (coefficients 1:4):
## F(df1 = 4, df2 = 81) = 0.4917, p-val = 0.7418
##
## Model Results:
##
## estimate se tval df pval ci.lb
## Type_stress_exposureCombination 0.1140 0.1773 0.6429 81 0.5221 -0.2387
## Type_stress_exposureMS 0.1571 0.1392 1.1287 81 0.2624 -0.1198
## Type_stress_exposureNoise 0.2202 0.2133 1.0321 81 0.3051 -0.2043
## Type_stress_exposureRestraint 0.1788 0.1554 1.1508 81 0.2532 -0.1303
## ci.ub
## Type_stress_exposureCombination 0.4667
## Type_stress_exposureMS 0.4340
## Type_stress_exposureNoise 0.6447
## Type_stress_exposureRestraint 0.4880
##
## ---
## Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
r2_ml(mod_ES4)
## R2_marginal R2_coditional
## 0.008276802 0.800570495
Stressor_ES <- orchard_plot(mod_ES4, mod = "Type_stress_exposure", xlab = "lnRR", alpha=0.4) +
geom_errorbarh(aes(xmin = lowerPR, xmax = upperPR), height = 0, show.legend = FALSE, size = 1.1, alpha = 0.5) + # prediction intervals
geom_errorbarh(aes(xmin = lowerCL, xmax = upperCL), height = 0.05, show.legend = FALSE, size = 2) + # confidence intervals
geom_point(aes(fill = name), size = 3, shape = 21)+ # mean estimate
scale_size_continuous(range = c(1, 7))+ # change point scaling
theme(panel.border = element_rect(colour = "black", fill=NA, size=1.3), # border around the plot
axis.text.x = element_text(size = 10), # change font sizes
axis.text.y = element_text(size = 10),
legend.title = element_text(size = 7),
legend.text = element_text(size = 7))
Stressor_ES
The age at which the individuals were exposed to the stressor.
mod_ES5 <-rma.mv(yi = lnRR_ESa, V = VCV_ES, mod = ~Age_stress_exposure-1, random = list(~1|Study_ID,
~1|ES_ID,
~1|Strain),
test = "t",
data = dat)
summary(mod_ES5)
##
## Multivariate Meta-Analysis Model (k = 92; method: REML)
##
## logLik Deviance AIC BIC AICc
## -48.8724 97.7449 111.7449 129.0862 113.1449
##
## Variance Components:
##
## estim sqrt nlvls fixed factor
## sigma^2.1 0.0221 0.1486 30 no Study_ID
## sigma^2.2 0.0315 0.1774 92 no ES_ID
## sigma^2.3 0.0186 0.1365 6 no Strain
##
## Test for Residual Heterogeneity:
## QE(df = 88) = 279.7710, p-val < .0001
##
## Test of Moderators (coefficients 1:4):
## F(df1 = 4, df2 = 88) = 1.7577, p-val = 0.1446
##
## Model Results:
##
## estimate se tval df pval
## Age_stress_exposureAdolescent 0.0288 0.2073 0.1388 88 0.8899
## Age_stress_exposureAdult 0.2096 0.1331 1.5750 88 0.1188
## Age_stress_exposureEarly postnatal 0.1402 0.1098 1.2770 88 0.2050
## Age_stress_exposurePrenatal 0.3790 0.1455 2.6059 88 0.0108
## ci.lb ci.ub
## Age_stress_exposureAdolescent -0.3832 0.4408
## Age_stress_exposureAdult -0.0549 0.4741
## Age_stress_exposureEarly postnatal -0.0780 0.3583
## Age_stress_exposurePrenatal 0.0900 0.6681 *
##
## ---
## Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
r2_ml(mod_ES5)
## R2_marginal R2_coditional
## 0.09585233 0.76647791
Age_stress_ES<-orchard_plot(mod_ES5, mod = "Age_stress_exposure", xlab = "lnRR", alpha=0.4) +
geom_errorbarh(aes(xmin = lowerPR, xmax = upperPR), height = 0, show.legend = FALSE, size = 1.1, alpha = 0.5) + # prediction intervals
geom_errorbarh(aes(xmin = lowerCL, xmax = upperCL), height = 0.05, show.legend = FALSE, size = 2) + # confidence intervals
geom_point(aes(fill = name), size = 3, shape = 21)+ # mean estimate
scale_size_continuous(range = c(1, 7))+ # change point scaling
theme(panel.border = element_rect(colour = "black", fill=NA, size=1.3), # border around the plot
axis.text.x = element_text(size = 10), # change font sizes
axis.text.y = element_text(size = 10),
legend.title = element_text(size = 7),
legend.text = element_text(size = 7))
Age_stress_ES
How long was the stress applied for (chronic = every day for 7 days or more)? This has the highest marginal R2 (currentl nearly 43%) - need to redo without outlier
VCV_ES4 <- impute_covariance_matrix(vi = dat4$lnRRV_ES, cluster = dat4$Study_ID, r = 0.5)
mod_ES6 <-rma.mv(yi = lnRR_ESa, V = VCV_ES4, mod = ~Stress_duration-1, random = list(~1|Study_ID,
~1|ES_ID,
~1|Strain),
test = "t",
data = dat4)
summary(mod_ES6)
##
## Multivariate Meta-Analysis Model (k = 89; method: REML)
##
## logLik Deviance AIC BIC AICc
## -46.2033 92.4066 102.4066 114.7362 103.1474
##
## Variance Components:
##
## estim sqrt nlvls fixed factor
## sigma^2.1 0.0136 0.1167 29 no Study_ID
## sigma^2.2 0.0351 0.1874 89 no ES_ID
## sigma^2.3 0.0104 0.1020 6 no Strain
##
## Test for Residual Heterogeneity:
## QE(df = 87) = 286.8908, p-val < .0001
##
## Test of Moderators (coefficients 1:2):
## F(df1 = 2, df2 = 87) = 4.5460, p-val = 0.0132
##
## Model Results:
##
## estimate se tval df pval ci.lb ci.ub
## Stress_durationAcute -0.0266 0.1112 -0.2388 87 0.8118 -0.2476 0.1945
## Stress_durationChronic 0.2220 0.0828 2.6815 87 0.0088 0.0575 0.3866
##
## Stress_durationAcute
## Stress_durationChronic **
##
## ---
## Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
r2_ml(mod_ES6)
## R2_marginal R2_coditional
## 0.1600392 0.8521674
Duration_ES<- orchard_plot(mod_ES6, mod = "Stress_duration", xlab = "lnRR", alpha=0.4) +
geom_errorbarh(aes(xmin = lowerPR, xmax = upperPR), height = 0, show.legend = FALSE, size = 1.1, alpha = 0.5) + # prediction intervals
geom_errorbarh(aes(xmin = lowerCL, xmax = upperCL), height = 0.05, show.legend = FALSE, size = 2) + # confidence intervals
geom_point(aes(fill = name), size = 3, shape = 21)+ # mean estimate
scale_size_continuous(range = c(1, 7))+ # change point scaling
theme(panel.border = element_rect(colour = "black", fill=NA, size=1.3), # border around the plot
axis.text.x = element_text(size = 10), # change font sizes
axis.text.y = element_text(size = 10),
legend.title = element_text(size = 7),
legend.text = element_text(size = 7))
Duration_ES
Does the form of enrichment include exercise through a running wheel or treadmill?
mod_ES8<- rma.mv(yi = lnRR_ESa, V = VCV_ES, mod = ~EE_exercise-1, random = list(~1|Study_ID,
~1|ES_ID,
~1|Strain),
test = "t",
data = dat)
summary(mod_ES8)
##
## Multivariate Meta-Analysis Model (k = 92; method: REML)
##
## logLik Deviance AIC BIC AICc
## -50.3523 100.7046 110.7046 123.2036 111.4189
##
## Variance Components:
##
## estim sqrt nlvls fixed factor
## sigma^2.1 0.0244 0.1561 30 no Study_ID
## sigma^2.2 0.0316 0.1778 92 no ES_ID
## sigma^2.3 0.0113 0.1062 6 no Strain
##
## Test for Residual Heterogeneity:
## QE(df = 90) = 294.4340, p-val < .0001
##
## Test of Moderators (coefficients 1:2):
## F(df1 = 2, df2 = 90) = 2.4502, p-val = 0.0920
##
## Model Results:
##
## estimate se tval df pval ci.lb ci.ub
## EE_exerciseExercise 0.1298 0.0884 1.4687 90 0.1454 -0.0458 0.3053
## EE_exerciseNo exercise 0.2313 0.1085 2.1307 90 0.0358 0.0156 0.4469
##
## EE_exerciseExercise
## EE_exerciseNo exercise *
##
## ---
## Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
r2_ml(mod_ES8)
## R2_marginal R2_coditional
## 0.03394203 0.83804575
Exercise_ES <- orchard_plot(mod_ES8, mod = "EE_exercise", xlab = "lnRR", alpha=0.4) +
geom_errorbarh(aes(xmin = lowerPR, xmax = upperPR), height = 0, show.legend = FALSE, size = 1.1, alpha = 0.5) + # prediction intervals
geom_errorbarh(aes(xmin = lowerCL, xmax = upperCL), height = 0.05, show.legend = FALSE, size = 2) + # confidence intervals
geom_point(aes(fill = name), size = 3, shape = 21)+ # mean estimate
scale_size_continuous(range = c(1, 7))+ # change point scaling
theme(panel.border = element_rect(colour = "black", fill=NA, size=1.3), # border around the plot
axis.text.x = element_text(size = 10), # change font sizes
axis.text.y = element_text(size = 10),
legend.title = element_text(size = 7),
legend.text = element_text(size = 7))
Exercise_ES
What order were animals exposed to stress and EE
mod_ES9 <- rma.mv(yi = lnRR_ESa, V = VCV_ES, mod = ~Exposure_order -1, random = list(~1|Study_ID,
~1|ES_ID,
~1|Strain),
test = "t",
data = dat)
summary(mod_ES9)
##
## Multivariate Meta-Analysis Model (k = 92; method: REML)
##
## logLik Deviance AIC BIC AICc
## -48.5795 97.1590 109.1590 124.0908 110.1834
##
## Variance Components:
##
## estim sqrt nlvls fixed factor
## sigma^2.1 0.0309 0.1758 30 no Study_ID
## sigma^2.2 0.0303 0.1741 92 no ES_ID
## sigma^2.3 0.0000 0.0000 6 no Strain
##
## Test for Residual Heterogeneity:
## QE(df = 89) = 295.5040, p-val < .0001
##
## Test of Moderators (coefficients 1:3):
## F(df1 = 3, df2 = 89) = 4.1173, p-val = 0.0088
##
## Model Results:
##
## estimate se tval df pval ci.lb
## Exposure_orderConcurrently 0.2255 0.1360 1.6583 89 0.1008 -0.0447
## Exposure_orderEnrichment first -0.2430 0.2047 -1.1871 89 0.2384 -0.6496
## Exposure_orderStress first 0.1597 0.0558 2.8623 89 0.0052 0.0488
## ci.ub
## Exposure_orderConcurrently 0.4958
## Exposure_orderEnrichment first 0.1637
## Exposure_orderStress first 0.2705 **
##
## ---
## Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
r2_ml(mod_ES9)
## R2_marginal R2_coditional
## 0.1525456 1.0000000
Order_ES <- orchard_plot(mod_ES9, mod = "Exposure_order", xlab = "lnRR", alpha=0.4) +
geom_errorbarh(aes(xmin = lowerPR, xmax = upperPR), height = 0, show.legend = FALSE, size = 1.1, alpha = 0.5) + # prediction intervals
geom_errorbarh(aes(xmin = lowerCL, xmax = upperCL), height = 0.05, show.legend = FALSE, size = 2) + # confidence intervals
geom_point(aes(fill = name), size = 3, shape = 21)+ # mean estimate
scale_size_continuous(range = c(1, 7))+ # change point scaling
theme(panel.border = element_rect(colour = "black", fill=NA, size=1.3), # border around the plot
axis.text.x = element_text(size = 10), # change font sizes
axis.text.y = element_text(size = 10),
legend.title = element_text(size = 7),
legend.text = element_text(size = 7))
Order_ES
What age were individuals exposed to EE
VCV_ES6 <- impute_covariance_matrix(vi = dat6$lnRRV_ES, cluster = dat6$Study_ID, r = 0.5)
mod_ES10 <- rma.mv(yi = lnRR_ESa, V = VCV_ES6, mod = ~Age_EE_exposure-1, random = list(~1|Study_ID,
~1|ES_ID,
~1|Strain),
test = "t",
data = dat6)
summary(mod_ES10)
##
## Multivariate Meta-Analysis Model (k = 88; method: REML)
##
## logLik Deviance AIC BIC AICc
## -45.0007 90.0014 100.0014 112.2731 100.7514
##
## Variance Components:
##
## estim sqrt nlvls fixed factor
## sigma^2.1 0.0315 0.1776 29 no Study_ID
## sigma^2.2 0.0277 0.1663 88 no ES_ID
## sigma^2.3 0.0022 0.0467 6 no Strain
##
## Test for Residual Heterogeneity:
## QE(df = 86) = 291.6264, p-val < .0001
##
## Test of Moderators (coefficients 1:2):
## F(df1 = 2, df2 = 86) = 3.5102, p-val = 0.0342
##
## Model Results:
##
## estimate se tval df pval ci.lb
## Age_EE_exposureAdolescent 0.1616 0.0666 2.4263 86 0.0173 0.0292
## Age_EE_exposureAdult 0.1527 0.1041 1.4661 86 0.1463 -0.0543
## ci.ub
## Age_EE_exposureAdolescent 0.2941 *
## Age_EE_exposureAdult 0.3597
##
## ---
## Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
r2_ml(mod_ES10)
## R2_marginal R2_coditional
## 0.0002073979 0.9645265244
Age_enrichment_ES <- orchard_plot(mod_ES10, mod = "Age_EE_exposure", xlab = "lnRR", alpha=0.4) +
geom_errorbarh(aes(xmin = lowerPR, xmax = upperPR), height = 0, show.legend = FALSE, size = 1.1, alpha = 0.5) + # prediction intervals
geom_errorbarh(aes(xmin = lowerCL, xmax = upperCL), height = 0.05, show.legend = FALSE, size = 2) + # confidence intervals
geom_point(aes(fill = name), size = 3, shape = 21)+ # mean estimate
scale_size_continuous(range = c(1, 7))+ # change point scaling
theme(panel.border = element_rect(colour = "black", fill=NA, size=1.3), # border around the plot
axis.text.x = element_text(size = 10), # change font sizes
axis.text.y = element_text(size = 10),
legend.title = element_text(size = 7),
legend.text = element_text(size = 7))
Age_enrichment_ES
dat_ESfm <- dat %>%
filter(Type_learning %in% c("Recognition", "Habituation", "Conditioning"),
Type_reinforcement %in% c("Appetitive", "Aversive", "Not applicable"),
EE_social %in% c("Social", "Non-social"),
Age_EE_exposure %in% c("Adult", "Adolescent"),
Type_stress_exposure %in% c("Restraint", "Noise", "MS", "Combination"),
Stress_duration %in% c("Chronic", "Acute"),
Age_stress_exposure %in% c("Prenatal", "Early postnatal", "Adult"))
VCV_ESfm <- impute_covariance_matrix(vi = dat_ESfm$lnRRV_ES, cluster = dat$Study_ID, r = 0.5)
mod_ESfm <- rma.mv(yi = lnRR_Sa, V = VCV_ESfm, mod = ~Type_learning-1 + Learning_vs_memory-1 + Type_reinforcement-1 + EE_social-1 + EE_exercise-1 + Age_EE_exposure-1 + Type_stress_exposure-1 + Age_stress_exposure-1 + Stress_duration-1 + Exposure_order, random = list(~1|Study_ID,
~1|ES_ID,
~1|Strain),
test = "t",
data = dat_ESfm)
#summary(mod_ESfm)
#r2_ml(mod_ESfm)
res_ESfm <- dredge(mod_ESfm, trace=2)
##
|
| | 0%
|
| | 1%
|
|= | 1%
|
|= | 2%
|
|== | 2%
|
|== | 3%
|
|== | 4%
|
|=== | 4%
|
|=== | 5%
|
|==== | 5%
|
|==== | 6%
|
|===== | 6%
|
|===== | 7%
|
|===== | 8%
|
|====== | 8%
|
|====== | 9%
|
|======= | 9%
|
|======= | 10%
|
|======= | 11%
|
|======== | 11%
|
|======== | 12%
|
|========= | 12%
|
|========= | 13%
|
|========== | 14%
|
|========== | 15%
|
|=========== | 15%
|
|=========== | 16%
|
|============ | 17%
|
|============ | 18%
|
|============= | 18%
|
|============= | 19%
|
|============== | 19%
|
|============== | 20%
|
|============== | 21%
|
|=============== | 21%
|
|=============== | 22%
|
|================ | 22%
|
|================ | 23%
|
|================ | 24%
|
|================= | 24%
|
|================= | 25%
|
|================== | 25%
|
|================== | 26%
|
|=================== | 26%
|
|=================== | 27%
|
|=================== | 28%
|
|==================== | 28%
|
|==================== | 29%
|
|===================== | 29%
|
|===================== | 30%
|
|===================== | 31%
|
|====================== | 31%
|
|====================== | 32%
|
|======================= | 32%
|
|======================= | 33%
|
|======================== | 34%
|
|======================== | 35%
|
|========================= | 35%
|
|========================= | 36%
|
|========================== | 37%
|
|========================== | 38%
|
|=========================== | 38%
|
|=========================== | 39%
|
|============================ | 39%
|
|============================ | 40%
|
|============================ | 41%
|
|============================= | 41%
|
|============================= | 42%
|
|============================== | 42%
|
|============================== | 43%
|
|============================== | 44%
|
|=============================== | 44%
|
|=============================== | 45%
|
|================================ | 45%
|
|================================ | 46%
|
|================================= | 46%
|
|================================= | 47%
|
|================================= | 48%
|
|================================== | 48%
|
|================================== | 49%
|
|=================================== | 49%
|
|=================================== | 50%
|
|=================================== | 51%
|
|==================================== | 51%
|
|==================================== | 52%
|
|===================================== | 52%
|
|===================================== | 53%
|
|===================================== | 54%
|
|====================================== | 54%
|
|====================================== | 55%
|
|======================================= | 55%
|
|======================================= | 56%
|
|======================================== | 56%
|
|======================================== | 57%
|
|======================================== | 58%
|
|========================================= | 58%
|
|========================================= | 59%
|
|========================================== | 59%
|
|========================================== | 60%
|
|========================================== | 61%
|
|=========================================== | 61%
|
|=========================================== | 62%
|
|============================================ | 62%
|
|============================================ | 63%
|
|============================================= | 64%
|
|============================================= | 65%
|
|============================================== | 65%
|
|============================================== | 66%
|
|=============================================== | 67%
|
|=============================================== | 68%
|
|================================================ | 68%
|
|================================================ | 69%
|
|================================================= | 69%
|
|================================================= | 70%
|
|================================================= | 71%
|
|================================================== | 71%
|
|================================================== | 72%
|
|=================================================== | 72%
|
|=================================================== | 73%
|
|=================================================== | 74%
|
|==================================================== | 74%
|
|==================================================== | 75%
|
|===================================================== | 75%
|
|===================================================== | 76%
|
|====================================================== | 76%
|
|====================================================== | 77%
|
|====================================================== | 78%
|
|======================================================= | 78%
|
|======================================================= | 79%
|
|======================================================== | 79%
|
|======================================================== | 80%
|
|======================================================== | 81%
|
|========================================================= | 81%
|
|========================================================= | 82%
|
|========================================================== | 82%
|
|========================================================== | 83%
|
|=========================================================== | 84%
|
|=========================================================== | 85%
|
|============================================================ | 85%
|
|============================================================ | 86%
|
|============================================================= | 87%
|
|============================================================= | 88%
|
|============================================================== | 88%
|
|============================================================== | 89%
|
|=============================================================== | 89%
|
|=============================================================== | 90%
|
|=============================================================== | 91%
|
|================================================================ | 91%
|
|================================================================ | 92%
|
|================================================================= | 92%
|
|================================================================= | 93%
|
|================================================================= | 94%
|
|================================================================== | 94%
|
|================================================================== | 95%
|
|=================================================================== | 95%
|
|=================================================================== | 96%
|
|==================================================================== | 96%
|
|==================================================================== | 97%
|
|==================================================================== | 98%
|
|===================================================================== | 98%
|
|===================================================================== | 99%
|
|======================================================================| 99%
|
|======================================================================| 100%
res_ESfm2<- subset(res_ESfm, delta <= 6, recalc.weights=FALSE)
importance(res_ESfm2)
## Learning_vs_memory Stress_duration Age_EE_exposure
## Sum of weights: 0.81 0.76 0.33
## N containing models: 44 37 20
## EE_social Age_stress_exposure EE_exercise
## Sum of weights: 0.24 0.20 0.19
## N containing models: 17 10 13
## Type_stress_exposure Type_learning Exposure_order
## Sum of weights: 0.18 0.10 0.05
## N containing models: 12 10 4
## Type_reinforcement
## Sum of weights: 0.01
## N containing models: 2
Funnel_ES<-funnel(mod_ESfm, xlab = "lnRR", ylab = "Standard Error")
Funnel_ES
| x | y | slab |
|---|---|---|
| 0.2454624 | 0.3157783 | 1 |
| 0.0943498 | 0.0894747 | 2 |
| 0.0721297 | 0.1801192 | 3 |
| 0.5122413 | 0.3618139 | 4 |
| 0.0239070 | 0.1503651 | 5 |
| 0.1878325 | 0.3797763 | 6 |
| 0.5943404 | 0.7323984 | 7 |
| -0.5767662 | 0.6891293 | 8 |
| -0.3122921 | 0.2525299 | 9 |
| -0.2518584 | 0.5249337 | 10 |
| 0.1089621 | 0.2396670 | 11 |
| 0.3516907 | 0.3378860 | 12 |
| 0.8580694 | 0.4242827 | 13 |
| -0.0865945 | 0.4461403 | 14 |
| 0.3708643 | 0.4352640 | 15 |
| -0.1741041 | 0.4638069 | 16 |
| 0.0668879 | 0.1504727 | 17 |
| 0.0609943 | 0.1468498 | 18 |
| 0.0338027 | 0.1200082 | 19 |
| 0.1677212 | 0.1078907 | 20 |
| 0.4952220 | 0.3127086 | 21 |
| 0.6195042 | 0.2853655 | 22 |
| -0.0916649 | 0.7074800 | 23 |
| 0.5195184 | 0.5772046 | 24 |
| -0.2657345 | 0.6844953 | 25 |
| 0.4489273 | 0.3407547 | 26 |
| -0.0926383 | 0.4526027 | 27 |
| -0.1633041 | 0.5372787 | 28 |
| 0.4772558 | 0.6861979 | 29 |
| 0.1985885 | 0.1566093 | 30 |
| -0.1955962 | 0.3269307 | 31 |
| -0.0826405 | 0.1563503 | 32 |
| -0.4216279 | 0.3686642 | 33 |
| -0.3885711 | 0.5383601 | 34 |
| -0.0444929 | 0.4582519 | 35 |
| 0.0748647 | 0.3361419 | 36 |
| 0.0742373 | 0.1231109 | 37 |
| 0.1742661 | 0.1077770 | 38 |
| 0.0742273 | 0.1663563 | 39 |
| -0.0172416 | 0.1273065 | 40 |
| 0.1011653 | 0.1818843 | 41 |
| -0.2071035 | 0.1647651 | 42 |
| -0.0719122 | 0.1288957 | 43 |
| -0.0369818 | 0.4317613 | 44 |
| -0.0652306 | 0.0978334 | 45 |
| 0.1348330 | 0.2108519 | 50 |
| -0.2980576 | 0.5661674 | 51 |
| -0.1049073 | 0.1845759 | 52 |
| -0.0553309 | 0.2749965 | 53 |
| -0.3622396 | 0.3299643 | 54 |
| -0.3028015 | 0.3046518 | 55 |
| 0.0509961 | 0.0963694 | 56 |
| 0.0503559 | 0.1246745 | 57 |
| 0.1192930 | 0.1007085 | 58 |
| 0.0883728 | 0.1047474 | 59 |
| 0.1581607 | 0.1498879 | 60 |
| 0.1189762 | 0.1036364 | 61 |
| 0.1007871 | 0.1127202 | 62 |
| -0.6853319 | 0.4166399 | 63 |
| 0.1701198 | 0.1242665 | 64 |
| -0.3361553 | 0.3889379 | 65 |
| 0.1975977 | 0.1479352 | 66 |
| 0.4759586 | 1.0767785 | 67 |
| -0.1526495 | 0.2138277 | 68 |
| 0.2996962 | 1.0055900 | 69 |
| -0.1497525 | 0.1160067 | 70 |
| 0.1623036 | 0.1042301 | 72 |
| 0.0628564 | 0.1580502 | 73 |
| -0.0073823 | 0.1516432 | 74 |
#year published was scaled previously under stress PB
dat_ESfm$sqrt_inv_e_n <- with(dat_ESfm, sqrt(1/CC_n + 1/EC_n + 1/ES_n + 1/CS_n))
PB_MR_ES<- rma.mv(lnRR_ESa, lnRRV_ES, mods = ~1 + sqrt_inv_e_n + Year_published + Type_learning + Type_reinforcement + EE_social + EE_exercise + Age_stress_exposure, random = list(~1|Study_ID,
~1|ES_ID,
~1|Strain), method = "REML", test = "t",
data = dat_ESfm)
estimates_PB_MR_ES<- estimates.CI(PB_MR_ES)
estimates_PB_MR_ES
| estimate | mean | lower | upper |
|---|---|---|---|
| intrcpt | 44.1112313 | -25.6589074 | 113.8813700 |
| sqrt_inv_e_n | -0.6897056 | -1.8750276 | 0.4956163 |
| Year_published | -0.0216674 | -0.0562695 | 0.0129347 |
| Type_learningHabituation | 0.2280781 | -0.4019047 | 0.8580609 |
| Type_learningRecognition | -0.0104748 | -0.5598110 | 0.5388614 |
| Type_reinforcementAversive | 0.0003694 | -0.4348552 | 0.4355939 |
| Type_reinforcementNot applicable | -0.2293071 | -0.9140504 | 0.4554362 |
| EE_socialSocial | 0.1166764 | -0.2532547 | 0.4866075 |
| EE_exerciseNo exercise | 0.1207767 | -0.2456524 | 0.4872057 |
| Age_stress_exposureEarly postnatal | 0.1583410 | -0.2028311 | 0.5195131 |
| Age_stress_exposurePrenatal | 0.1954658 | -0.3134537 | 0.7043854 |
#no effect of inv_effective sample size or year published
#leave-one-out analysis
dat$Study_ID <- as.factor(dat$Study_ID)
LeaveOneOut_effectsize <- list()
for(i in 1:length(levels(dat$Study_ID))){
LeaveOneOut_effectsize[[i]] <- rma.mv(yi = lnRR_Ea, V = lnRRV_E,
random = list(~1 | Study_ID,~1| ES_ID, ~1 | Strain),
method = "REML", data = dat[dat$Study_ID
!= levels(dat$Study_ID)[i], ])}
# writing function for extracting est, ci.lb, and ci.ub from all models
est.func <- function(mod_E0){
df <- data.frame(est = mod_E0$b, lower = mod_E0$ci.lb, upper = mod_E0$ci.ub)
return(df)
}
#using dplyr to form data frame
MA_CVR <- lapply(LeaveOneOut_effectsize, function(x) est.func(x))%>% bind_rows %>% mutate(left_out = levels(dat$Study_ID))
#telling ggplot to stop reordering factors
MA_CVR$left_out<- as.factor(MA_CVR$left_out)
MA_CVR$left_out<-factor(MA_CVR$left_out, levels = MA_CVR$left_out)
#plotting
leaveoneout_ES <- ggplot(MA_CVR) +
geom_hline(yintercept = 0, lty = 2, lwd = 1) +
geom_hline(yintercept = mod_E0$ci.lb, lty = 3, lwd = 0.75, colour = "black") +
geom_hline(yintercept = mod_E0$b, lty = 1, lwd = 0.75, colour = "black") +
geom_hline(yintercept = mod_E0$ci.ub, lty = 3, lwd = 0.75, colour = "black") +
geom_pointrange(aes(x = left_out, y = est, ymin = lower, ymax = upper)) +
xlab("Study left out") +
ylab("lnRR, 95% CI") +
coord_flip() +
theme(panel.grid.minor = element_blank())+
theme_bw() + theme(panel.grid.major = element_blank()) +
theme(panel.grid.minor.x = element_blank() ) +
theme(axis.text.y = element_text(size = 6))
leaveoneout_ES
dat$Study_ID <- as.integer(dat$Study_ID)
mod_list1 <- list(mod_E0, mod_S0, mod_ES0)
mod_res1 <- lapply(mod_list1, function(x) mod_results(x, mod = "Int"))
merged1 <- submerge(mod_res1[[3]], mod_res1[[2]], mod_res1[[1]], mix = T)
merged1$mod_table$name <- factor(merged1$mod_table$name, levels = c("Intrcpt1",
"Intrcpt2", "Intrcpt3"),
labels = rev(c("Enrichment ME", "Stress ME", "Interaction")))
merged1$data$moderator <- factor(merged1$data$moderator, levels = c("Intrcpt1",
"Intrcpt2", "Intrcpt3"),
labels = rev(c("Enrichment ME", "Stress ME", "Interaction")))
orchard1<- orchard_plot(merged1, mod = "Int", xlab = "lnRR", angle = 0) +
geom_errorbarh(aes(xmin = lowerPR, xmax = upperPR), height = 0, show.legend = FALSE, size = 1.1, alpha = 0.5) + # prediction intervals
geom_errorbarh(aes(xmin = lowerCL, xmax = upperCL), height = 0.05, show.legend = FALSE, size = 2) + # confidence intervals
xlim(-2,4.5) +
geom_point(aes(fill = name), size = 5, shape = 21)+ # mean estimate
scale_size_continuous(range = c(1, 7))+ # change point scaling
theme(panel.border = element_rect(colour = "black", fill=NA, size=1.3), # border around the plot
text = element_text(size = 15), # change font sizes
legend.title = element_text(size = 10),
legend.text = element_text(size = 10))
orchard1
The age at which the individuals were exposed to environmental enrichment.
This needs to be redone after recategorising
mod_ES9 <- rma.mv(yi = lnRR_ESa, V = lnRRV_ES, mod = ~Age_EE_exposure-1, random = list(~1|Study_ID,
~1|ES_ID,
~1|Strain),
test = "t",
data = dat)
summary(mod_ES9)
##
## Multivariate Meta-Analysis Model (k = 92; method: REML)
##
## logLik Deviance AIC BIC AICc
## -47.5384 95.0767 107.0767 122.0086 108.1011
##
## Variance Components:
##
## estim sqrt nlvls fixed factor
## sigma^2.1 0.0450 0.2122 30 no Study_ID
## sigma^2.2 0.0194 0.1393 92 no ES_ID
## sigma^2.3 0.0000 0.0000 6 no Strain
##
## Test for Residual Heterogeneity:
## QE(df = 89) = 302.2511, p-val < .0001
##
## Test of Moderators (coefficients 1:3):
## F(df1 = 3, df2 = 89) = 4.6525, p-val = 0.0046
##
## Model Results:
##
## estimate se tval df pval ci.lb
## Age_EE_exposureAdolescent 0.2006 0.0599 3.3519 89 0.0012 0.0817
## Age_EE_exposureAdult 0.1526 0.0979 1.5582 89 0.1227 -0.0420
## Age_EE_exposureEarly postnatal -0.1674 0.3086 -0.5424 89 0.5889 -0.7805
## ci.ub
## Age_EE_exposureAdolescent 0.3196 **
## Age_EE_exposureAdult 0.3472
## Age_EE_exposureEarly postnatal 0.4457
##
## ---
## Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
r2_ml(mod_ES9)
## R2_marginal R2_coditional
## 0.082042 1.000000
# Orchard plot
orchard_plot(mod_ES9, mod = "Age_EE_exposure", xlab = "lnRR", alpha=0.4) +
geom_errorbarh(aes(xmin = lowerPR, xmax = upperPR), height = 0, show.legend = FALSE, size = 1.1, alpha = 0.5) + # prediction intervals
geom_errorbarh(aes(xmin = lowerCL, xmax = upperCL), height = 0.05, show.legend = FALSE, size = 2) + # confidence intervals
geom_point(aes(fill = name), size = 5, shape = 21)+ # mean estimate
scale_size_continuous(range = c(1, 7))+ # change point scaling
theme(panel.border = element_rect(colour = "black", fill=NA, size=1.3), # border around the plot
text = element_text(size = 24), # change font sizes
legend.title = element_text(size = 15),
legend.text = element_text(size = 13))
VCV_E20 <- impute_covariance_matrix(vi = dat$lnRRV_E2, cluster = dat$Study_ID, r = 0.5)
#Model doesn't converge with VCV
mod_E20 <- rma.mv(yi = lnRR_E2a, V = VCV_E20, random = list(~1|Study_ID,
~ 1|Strain,
~1|ES_ID),
test = "t",
data = dat,
control=list(optimizer="optim", optmethod="Nelder-Mead"))
summary(mod_E20)
##
## Multivariate Meta-Analysis Model (k = 92; method: REML)
##
## logLik Deviance AIC BIC AICc
## -80.4236 160.8471 168.8471 178.8905 169.3122
##
## Variance Components:
##
## estim sqrt nlvls fixed factor
## sigma^2.1 0.0209 0.1444 30 no Study_ID
## sigma^2.2 0.0000 0.0001 6 no Strain
## sigma^2.3 0.0000 0.0000 92 no ES_ID
##
## Test for Heterogeneity:
## Q(df = 91) = 23.4395, p-val = 1.0000
##
## Model Results:
##
## estimate se tval df pval ci.lb ci.ub
## 0.1376 0.0729 1.8860 91 0.0625 -0.0073 0.2825 .
##
## ---
## Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
i2_ml(mod_E20)
## I2_total I2_Study_ID I2_Strain I2_ES_ID
## 8.461245e-02 8.461241e-02 4.008740e-08 0.000000e+00
orchard_plot(mod_E20, mod = "Int", xlab = "lnRR")
VCV_S20 <- impute_covariance_matrix(vi = dat$lnRRV_S2, cluster = dat$Study_ID, r = 0.5)
mod_S20 <- rma.mv(yi = lnRR_S2a, V = VCV_S20, random = list(~1|Study_ID,
~ 1|Strain,
~1|ES_ID),
test = "t",
data = dat)
summary(mod_S20)
##
## Multivariate Meta-Analysis Model (k = 92; method: REML)
##
## logLik Deviance AIC BIC AICc
## -89.4392 178.8785 186.8785 196.9219 187.3436
##
## Variance Components:
##
## estim sqrt nlvls fixed factor
## sigma^2.1 0.0300 0.1732 30 no Study_ID
## sigma^2.2 0.0000 0.0000 6 no Strain
## sigma^2.3 0.0000 0.0000 92 no ES_ID
##
## Test for Heterogeneity:
## Q(df = 91) = 37.2472, p-val = 1.0000
##
## Model Results:
##
## estimate se tval df pval ci.lb ci.ub
## -0.0735 0.0790 -0.9307 91 0.3545 -0.2304 0.0834
##
## ---
## Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
i2_ml(mod_S20)
## I2_total I2_Study_ID I2_Strain I2_ES_ID
## 1.143516e-01 1.143515e-01 7.749001e-09 0.000000e+00
orchard_plot(mod_S20, mod = "Int", xlab = "lnRR")
VCV_ES20 <- impute_covariance_matrix(vi = dat$lnRRV_ES2, cluster = dat$Study_ID, r = 0.5)
mod_ES20 <- rma.mv(yi = lnRR_ES2a, V = VCV_ES20, random = list(~1|Study_ID,
~ 1|Strain,
~1|ES_ID),
test = "t",
data = dat)
summary(mod_ES20)
##
## Multivariate Meta-Analysis Model (k = 92; method: REML)
##
## logLik Deviance AIC BIC AICc
## -86.3233 172.6465 180.6465 190.6900 181.1116
##
## Variance Components:
##
## estim sqrt nlvls fixed factor
## sigma^2.1 0.0238 0.1541 30 no Study_ID
## sigma^2.2 0.0000 0.0000 6 no Strain
## sigma^2.3 0.0000 0.0000 92 no ES_ID
##
## Test for Heterogeneity:
## Q(df = 91) = 29.6180, p-val = 1.0000
##
## Model Results:
##
## estimate se tval df pval ci.lb ci.ub
## 0.1100 0.0772 1.4247 91 0.1577 -0.0434 0.2634
##
## ---
## Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
i2_ml(mod_ES20)
## I2_total I2_Study_ID I2_Strain I2_ES_ID
## 8.967119e-02 8.967119e-02 4.077839e-10 1.167794e-10
orchard_plot(mod_ES20, mod = "Int", xlab = "lnRR")
VCV_E30 <- impute_covariance_matrix(vi = dat$lnRRV_E3, cluster = dat$Study_ID, r = 0.5)
mod_E30 <- rma.mv(yi = lnRR_E3a, V = VCV_E30, random = list(~1|Study_ID,
~ 1|Strain,
~1|ES_ID),
test = "t",
data = dat)
summary(mod_E30)
##
## Multivariate Meta-Analysis Model (k = 92; method: REML)
##
## logLik Deviance AIC BIC AICc
## -90.8497 181.6995 189.6995 199.7429 190.1646
##
## Variance Components:
##
## estim sqrt nlvls fixed factor
## sigma^2.1 0.0097 0.0986 30 no Study_ID
## sigma^2.2 0.0000 0.0001 6 no Strain
## sigma^2.3 0.0000 0.0000 92 no ES_ID
##
## Test for Heterogeneity:
## Q(df = 91) = 27.4127, p-val = 1.0000
##
## Model Results:
##
## estimate se tval df pval ci.lb ci.ub
## 0.1293 0.0674 1.9192 91 0.0581 -0.0045 0.2632 .
##
## ---
## Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
i2_ml(mod_E30)
## I2_total I2_Study_ID I2_Strain I2_ES_ID
## 4.001741e-02 4.001738e-02 2.945906e-08 1.186013e-14
orchard_plot(mod_E30, mod = "Int", xlab = "lnRR")
VCV_S30 <- impute_covariance_matrix(vi = dat$lnRRV_S3, cluster = dat$Study_ID, r = 0.5)
mod_S30 <- rma.mv(yi = lnRR_S3a, V = VCV_S30, random = list(~1|Study_ID,
~ 1|Strain,
~1|ES_ID),
test = "t",
data = dat,
control=list(optimizer="optim", optmethod="Nelder-Mead"))
summary(mod_S30)
##
## Multivariate Meta-Analysis Model (k = 92; method: REML)
##
## logLik Deviance AIC BIC AICc
## -80.0114 160.0229 168.0229 178.0663 168.4880
##
## Variance Components:
##
## estim sqrt nlvls fixed factor
## sigma^2.1 0.0000 0.0000 30 no Study_ID
## sigma^2.2 0.0000 0.0000 6 no Strain
## sigma^2.3 0.0000 0.0000 92 no ES_ID
##
## Test for Heterogeneity:
## Q(df = 91) = 15.4501, p-val = 1.0000
##
## Model Results:
##
## estimate se tval df pval ci.lb ci.ub
## -0.0120 0.0618 -0.1942 91 0.8465 -0.1347 0.1107
##
## ---
## Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
i2_ml(mod_S30)
## I2_total I2_Study_ID I2_Strain I2_ES_ID
## 0 0 0 0
orchard_plot(mod_S30, mod = "Int", xlab = "lnRR")
mod_list2 <- list(mod_S30, mod_E30, mod_ES20, mod_S20, mod_E20) #rearranged the order so that it matches intext results
mod_res2 <- lapply(mod_list2, function(x) mod_results(x, mod = "Int"))
merged2 <- submerge(mod_res2[[1]], mod_res2[[2]], mod_res2[[3]], mod_res2[[4]], mod_res2[[5]], mix = T)
merged2$mod_table$name <- factor(merged2$mod_table$name, levels = c("Intrcpt1",
"Intrcpt2", "Intrcpt3", "Intrcpt4", "Intrcpt5"),
labels = rev(c("EC/CC", "CS/CC", "ES/CC", "ES/CS", "ES/EC")))
merged2$data$moderator <- factor(merged2$data$moderator, levels = c("Intrcpt1",
"Intrcpt2", "Intrcpt3", "Intrcpt4", "Intrcpt5"),
labels = rev(c("EC/CC", "CS/CC", "ES/CC", "ES/CS", "ES/EC")))
orchard2 <- orchard_plot(merged2, mod = "Int", xlab = "lnRR", angle = 0) +
geom_errorbarh(aes(xmin = lowerPR, xmax = upperPR), height = 0, show.legend = FALSE, size = 1.1, alpha = 0.5) + # prediction intervals
geom_errorbarh(aes(xmin = lowerCL, xmax = upperCL), height = 0.05, show.legend = FALSE, size = 2) + # confidence intervals
xlim(-2,4.5) +
geom_point(aes(fill = name), size = 5, shape = 21)+ # mean estimate
scale_size_continuous(range = c(1, 7))+ # change point scaling
theme(panel.border = element_rect(colour = "black", fill=NA, size=1.3), # border around the plot
text = element_text(size = 15), # change font sizes
legend.title = element_text(size = 10),
legend.text = element_text(size = 10))
orchard2
p1 <- orchard1 + orchard2 + plot_annotation(tag_levels = 'A')
p1
#Enrichment
E_mod <- (LvsM_E + Learning_E + Reinforcement_E)/ (Age_E + Exercise_E + Social_E) + plot_annotation(tag_levels = 'A')
E_mod
S_mod <- (LvsM_S + Learning_S + Reinforcement_S) / (Age_S + Stressor + Duration_S) + plot_annotation(tag_levels = 'A')
S_mod
ES_mod <- plot_grid(LvsM_ES, Learning_ES, Reinforcement_ES, Age_enrichment_ES, Age_stress_ES, Order_ES, Exercise_ES, Social_ES, Stressor_ES, Duration_ES,
labels = "AUTO", ncol = 5)
ES_mod
# TODO - please save figures which you use for the MS in the "fig" folder
# TODO - I have not tried knitting yet
# EE
pdf(NULL)
dev.control(displaylist="enable")
par(mar=c(4,4,0.1,0))
funnel(mod_Sfm, xlab = "lnRR", ylab = "Standard Error",
xlim = c(-2,2),
ylim = c(0,1.05))
A <- recordPlot()
invisible(dev.off())
# Stress
pdf(NULL)
dev.control(displaylist="enable")
par(mar=c(4,4,0.1,0))
funnel(mod_Sfm, xlab = "lnRR", ylab = "Standard Error",
xlim = c(-2,2),
ylim = c(0,1.05))
B <- recordPlot()
invisible(dev.off())
# Interaction
pdf(NULL)
dev.control(displaylist="enable")
par(mar=c(4,4,0.1,0))
funnel(mod_ESfm, xlab = "lnRR", ylab = "Standard Error",
xlim = c(-2,2),
ylim = c(0,1.05))
C <- recordPlot()
invisible(dev.off())
# putting together
funnels <- (ggdraw(A) + ggdraw(B) + ggdraw(C)) + plot_annotation(tag_levels = 'A')
funnels
Why does the funnel plot look so strange and why is I2 higher?
mod_E0a <- rma.mv(yi = SMD_Ea, V = VCV_Ea, random = list(~1|Study_ID,
~1|ES_ID,
~1|Strain),
test = "t",
data = dat)
summary(mod_E0a)
##
## Multivariate Meta-Analysis Model (k = 92; method: REML)
##
## logLik Deviance AIC BIC AICc
## -113.1872 226.3745 234.3745 244.4179 234.8396
##
## Variance Components:
##
## estim sqrt nlvls fixed factor
## sigma^2.1 0.3274 0.5722 30 no Study_ID
## sigma^2.2 0.4890 0.6993 92 no ES_ID
## sigma^2.3 0.0000 0.0000 6 no Strain
##
## Test for Heterogeneity:
## Q(df = 91) = 12552.8093, p-val < .0001
##
## Model Results:
##
## estimate se tval df pval ci.lb ci.ub
## 0.7290 0.1333 5.4689 91 <.0001 0.4642 0.9938 ***
##
## ---
## Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
i2_ml(mod_E0a)
## I2_total I2_Study_ID I2_ES_ID I2_Strain
## 9.972502e-01 3.999606e-01 5.972896e-01 1.643432e-09
Why does the funnel plot look so strange and why is I2 higher?
mod_S0a <- rma.mv(yi = SMD_Sa, V = VCV_Sa, random = list(~1|Study_ID,
~1|ES_ID,
~1|Strain),
test = "t",
data = dat)
summary(mod_S0a)
##
## Multivariate Meta-Analysis Model (k = 92; method: REML)
##
## logLik Deviance AIC BIC AICc
## -121.4344 242.8688 250.8688 260.9122 251.3339
##
## Variance Components:
##
## estim sqrt nlvls fixed factor
## sigma^2.1 0.5602 0.7484 30 no Study_ID
## sigma^2.2 0.5409 0.7355 92 no ES_ID
## sigma^2.3 0.0000 0.0000 6 no Strain
##
## Test for Heterogeneity:
## Q(df = 91) = 28682.7938, p-val < .0001
##
## Model Results:
##
## estimate se tval df pval ci.lb ci.ub
## -0.4360 0.1627 -2.6795 91 0.0088 -0.7592 -0.1128 **
##
## ---
## Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
i2_ml(mod_S0a)
## I2_total I2_Study_ID I2_ES_ID I2_Strain
## 9.979596e-01 5.076959e-01 4.902636e-01 2.171066e-09
Why does the funnel plot look so strange and why is I2 higher?
mod_ES0a <- rma.mv(yi = SMD_ESa, V = VCV_ESa, random = list(~1|Study_ID,
~1|ES_ID,
~1|Strain),
test = "t",
data = dat)
summary(mod_ES0a)
##
## Multivariate Meta-Analysis Model (k = 92; method: REML)
##
## logLik Deviance AIC BIC AICc
## -126.8085 253.6170 261.6170 271.6604 262.0821
##
## Variance Components:
##
## estim sqrt nlvls fixed factor
## sigma^2.1 0.7153 0.8457 30 no Study_ID
## sigma^2.2 0.5861 0.7656 92 no ES_ID
## sigma^2.3 0.0000 0.0001 6 no Strain
##
## Test for Heterogeneity:
## Q(df = 91) = 11648.6783, p-val < .0001
##
## Model Results:
##
## estimate se tval df pval ci.lb ci.ub
## 0.6961 0.1810 3.8449 91 0.0002 0.3365 1.0557 ***
##
## ---
## Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
i2_ml(mod_ES0a)
## I2_total I2_Study_ID I2_ES_ID I2_Strain
## 9.931281e-01 5.458585e-01 4.472695e-01 1.577079e-08
Social enrichment
Does EE also include a manipulation of social environment? Note that we excluded any studies that exclusively used social enrichment.s